add services
This commit is contained in:
commit
9913e14d69
14
README.md
Normal file
14
README.md
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
## Usage

Clone the repository:

`git clone https://git.drmlab.io/ImSp4rky/sp4rky-devine-services.git`

Add the folder to `devine.yaml`:

```
directories:
  services: "path/to/services"
```

See the help text for each service:

`devine dl SERVICE -?`
414
services/ALL4/__init__.py
Normal file
414
services/ALL4/__init__.py
Normal file
@ -0,0 +1,414 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
from collections.abc import Generator
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from http.cookiejar import MozillaCookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from Crypto.Util.Padding import unpad
|
||||||
|
from Cryptodome.Cipher import AES
|
||||||
|
from pywidevine.cdm import Cdm as WidevineCdm
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests.dash import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapter, Subtitle, Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class ALL4(Service):
    """
    Service code for Channel 4's All4 streaming service (https://channel4.com).

    \b
    Author: stabbedbybrick
    Authorization: Credentials
    Robustness:
      L3: 1080p, AAC2.0

    \b
    Tips:
        - Use complete title URL or slug as input:
            https://www.channel4.com/programmes/taskmaster OR taskmaster
        - Use on demand URL for directly downloading episodes:
            https://www.channel4.com/programmes/taskmaster/on-demand/75588-002
        - Both android and web/pc endpoints are checked for quality profiles.
            If android is missing 1080p, it automatically falls back to web.
    """

    GEOFENCE = ("gb", "ie")
    TITLE_RE = r"^(?:https?://(?:www\.)?channel4\.com/programmes/)?(?P<id>[a-z0-9-]+)(?:/on-demand/(?P<vid>[0-9-]+))?"

    @staticmethod
    @click.command(name="ALL4", short_help="https://channel4.com", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> ALL4:
        return ALL4(ctx, **kwargs)

    def __init__(self, ctx: Context, title: str):
        self.title = title
        super().__init__(ctx)

        # Populated later: authorization by authenticate(), the rest by get_tracks().
        self.authorization: str
        self.asset_id: int
        self.license_token: str
        self.manifest: str

        # Identify as the Android app on every request (values come from config.yaml).
        self.session.headers.update(
            {
                "X-C4-Platform-Name": self.config["device"]["platform_name"],
                "X-C4-Device-Type": self.config["device"]["device_type"],
                "X-C4-Device-Name": self.config["device"]["device_name"],
                "X-C4-App-Version": self.config["device"]["app_version"],
                "X-C4-Optimizely-Datafile": self.config["device"]["optimizely_datafile"],
            }
        )

    def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
        """
        Log in with credentials, reusing or refreshing cached tokens when possible.

        Sets self.authorization (Bearer header value) on success.
        Raises EnvironmentError when no credential is provided; exits on API error.
        """
        super().authenticate(cookies, credential)
        if not credential:
            raise EnvironmentError("Service requires Credentials for Authentication.")

        cache = self.cache.get(f"tokens_{credential.sha1}")

        if cache and not cache.expired:
            # cached
            self.log.info(" + Using cached Tokens...")
            tokens = cache.data
        elif cache and cache.expired:
            # expired, refresh
            self.log.info("Refreshing cached Tokens")
            r = self.session.post(
                self.config["endpoints"]["login"],
                headers={"authorization": f"Basic {self.config['android']['auth']}"},
                data={
                    "grant_type": "refresh_token",
                    "username": credential.username,
                    "password": credential.password,
                    "refresh_token": cache.data["refreshToken"],
                },
            )
            try:
                res = r.json()
            except json.JSONDecodeError:
                raise ValueError(f"Failed to refresh tokens: {r.text}")

            if "error" in res:
                self.log.error(f"Failed to refresh tokens: {res['errorMessage']}")
                sys.exit(1)

            tokens = res
            self.log.info(" + Refreshed")
        else:
            # new
            headers = {"authorization": f"Basic {self.config['android']['auth']}"}
            data = {
                "grant_type": "password",
                "username": credential.username,
                "password": credential.password,
            }
            r = self.session.post(self.config["endpoints"]["login"], headers=headers, data=data)
            try:
                res = r.json()
            except json.JSONDecodeError:
                raise ValueError(f"Failed to log in: {r.text}")

            if "error" in res:
                self.log.error(f"Failed to log in: {res['errorMessage']}")
                sys.exit(1)

            tokens = res
            self.log.info(" + Acquired tokens...")

        # Re-cache (refreshes expiration) regardless of which branch produced tokens.
        cache.set(tokens, expiration=tokens["expiresIn"])

        self.authorization = f"Bearer {tokens['accessToken']}"

    def search(self) -> Generator[SearchResult, None, None]:
        """Search the All4 catalogue for self.title and yield SearchResults."""
        params = {
            "expand": "default",
            "q": self.title,
            "limit": "100",
            "offset": "0",
        }

        r = self.session.get(self.config["endpoints"]["search"], params=params)
        r.raise_for_status()

        results = r.json()
        if isinstance(results["results"], list):
            for result in results["results"]:
                yield SearchResult(
                    id_=result["brand"].get("websafeTitle"),
                    title=result["brand"].get("title"),
                    description=result["brand"].get("description"),
                    label=result.get("label"),
                    url=result["brand"].get("href"),
                )

    def get_titles(self) -> Union[Movies, Series]:
        """
        Resolve self.title (URL or slug) into Movies or Series.

        If the input is an on-demand episode URL, only that episode is returned;
        when the API listing lacks it, the episode page HTML is parsed instead.
        """
        title, on_demand = (re.match(self.TITLE_RE, self.title).group(i) for i in ("id", "vid"))

        r = self.session.get(
            self.config["endpoints"]["title"].format(title=title),
            params={"client": "android-mod", "deviceGroup": "mobile", "include": "extended-restart"},
            headers={"Authorization": self.authorization},
        )
        if not r.ok:
            self.log.error(r.text)
            sys.exit(1)

        data = r.json()

        if on_demand is not None:
            episodes = [
                Episode(
                    id_=episode["programmeId"],
                    service=self.__class__,
                    title=data["brand"]["title"],
                    season=episode["seriesNumber"],
                    number=episode["episodeNumber"],
                    name=episode["originalTitle"],
                    language="en",
                    data=episode["assetInfo"].get("streaming") or episode["assetInfo"].get("download"),
                )
                for episode in data["brand"]["episodes"]
                if episode.get("assetInfo") and episode["programmeId"] == on_demand
            ]
            if not episodes:
                # Parse HTML of episode page to find title
                data = self.get_html(self.title)
                episodes = [
                    Episode(
                        id_=data["selectedEpisode"]["programmeId"],
                        service=self.__class__,
                        title=data["brand"]["title"],
                        season=data["selectedEpisode"]["seriesNumber"] or 0,
                        number=data["selectedEpisode"]["episodeNumber"] or 0,
                        name=data["selectedEpisode"]["originalTitle"],
                        language="en",
                        data=data["selectedEpisode"],
                    )
                ]

            return Series(episodes)

        elif data["brand"]["programmeType"] == "FM":
            return Movies(
                [
                    Movie(
                        id_=movie["programmeId"],
                        service=self.__class__,
                        name=data["brand"]["title"],
                        # Year is taken from a "(YYYY) ..." prefix in the summary text.
                        year=int(data["brand"]["summary"].split(" ")[0].strip().strip("()")),
                        language="en",
                        data=movie["assetInfo"].get("streaming") or movie["assetInfo"].get("download"),
                    )
                    for movie in data["brand"]["episodes"]
                ]
            )
        else:
            return Series(
                [
                    Episode(
                        id_=episode["programmeId"],
                        service=self.__class__,
                        title=data["brand"]["title"],
                        season=episode["seriesNumber"],
                        number=episode["episodeNumber"],
                        name=episode["originalTitle"],
                        language="en",
                        data=episode["assetInfo"].get("streaming") or episode["assetInfo"].get("download"),
                    )
                    for episode in data["brand"]["episodes"]
                    if episode.get("assetInfo")
                ]
            )

    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """Fetch both playlists, pick the best manifest, and build the track list."""
        android_assets: tuple = self.android_playlist(title.id)
        web_assets: tuple = self.web_playlist(title.id)
        self.manifest, self.license_token, subtitle, data = self.sort_assets(title, android_assets, web_assets)
        self.asset_id = int(title.data["assetId"])

        tracks = DASH.from_url(self.manifest, self.session).to_tracks(title.language)
        tracks.videos[0].data = data

        # manifest subtitles are sometimes empty even if they exist
        # so we clear them and add the subtitles manually
        tracks.subtitles.clear()
        if subtitle is not None:
            tracks.add(
                Subtitle(
                    id_=hashlib.md5(subtitle.encode()).hexdigest()[0:6],
                    url=subtitle,
                    codec=Subtitle.Codec.from_mime(subtitle[-3:]),
                    language=title.language,
                    is_original_lang=True,
                    forced=False,
                    sdh=False,
                )
            )
        else:
            self.log.warning("- Subtitles are either missing or empty")

        return tracks

    def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
        """Build chapters from ad-break offsets, plus a Credits chapter if known."""
        track = title.tracks.videos[0]

        chapters = [
            Chapter(
                name=f"Chapter {i + 1:02}",
                # Offsets are in milliseconds; format as HH:MM:SS.mmm.
                timestamp=datetime.fromtimestamp((ms / 1000), tz=timezone.utc).strftime("%H:%M:%S.%f")[:-3],
            )
            for i, ms in enumerate(x["breakOffset"] for x in track.data["adverts"]["breaks"])
        ]

        if track.data.get("endCredits", {}).get("squeezeIn"):
            chapters.append(
                Chapter(
                    name="Credits",
                    timestamp=datetime.fromtimestamp(
                        (track.data["endCredits"]["squeezeIn"] / 1000), tz=timezone.utc
                    ).strftime("%H:%M:%S.%f")[:-3],
                )
            )

        return chapters

    def get_widevine_service_certificate(self, **_: Any) -> str:
        return WidevineCdm.common_privacy_cert

    def get_widevine_license(self, challenge: bytes, **_: Any) -> str:
        """POST the Widevine challenge to the license proxy and return the license."""
        payload = {
            "message": base64.b64encode(challenge).decode("utf8"),
            "token": self.license_token,
            "request_id": self.asset_id,
            "video": {"type": "ondemand", "url": self.manifest},
        }

        r = self.session.post(self.config["endpoints"]["license"], json=payload)
        if not r.ok:
            raise ConnectionError(f"License request failed: {r.json()['status']['type']}")

        return r.json()["license"]

    # Service specific functions

    def sort_assets(self, title: Union[Movie, Episode], android_assets: tuple, web_assets: tuple) -> tuple:
        """
        Probe both manifests and pick the preferred one.

        Prefers the android manifest, falling back to web when android is
        unavailable or tops out below 1080p.

        Returns (manifest_url, license_token, subtitle_url, playlist_data).
        """
        android_heights = None
        web_heights = None

        if android_assets is not None:
            try:
                a_manifest, a_token, a_subtitle, a_data = android_assets
                android_tracks = DASH.from_url(a_manifest, self.session).to_tracks(title.language)
                android_heights = sorted([int(track.height) for track in android_tracks.videos], reverse=True)
            except Exception:
                android_heights = None

        if web_assets is not None:
            try:
                b_manifest, b_token, b_subtitle, b_data = web_assets
                session = self.session
                session.headers.update(self.config["headers"])
                web_tracks = DASH.from_url(b_manifest, session).to_tracks(title.language)
                web_heights = sorted([int(track.height) for track in web_tracks.videos], reverse=True)
            except Exception:
                web_heights = None

        if not android_heights and not web_heights:
            self.log.error("Failed to request manifest data. If you're behind a VPN/proxy, you might be blocked")
            sys.exit(1)

        # Fall back to web only when it actually loaded; previously this branch
        # dereferenced b_token even when the web probe failed (NameError), and
        # returned the web playlist data alongside the android manifest.
        if web_heights and (not android_heights or android_heights[0] < 1080):
            lic_token = self.decrypt_token(b_token, client="WEB")
            return b_manifest, lic_token, b_subtitle, b_data
        else:
            lic_token = self.decrypt_token(a_token, client="ANDROID")
            return a_manifest, lic_token, a_subtitle, a_data

    def android_playlist(self, video_id: str) -> tuple:
        """Request the ANDROID playlist. Returns (manifest, token, subtitle, data) or None."""
        url = self.config["android"]["vod"].format(video_id=video_id)
        headers = {"authorization": self.authorization}

        r = self.session.get(url=url, headers=headers)
        if not r.ok:
            self.log.warning("Request for Android endpoint returned %s", r)
            return None

        data = json.loads(r.content)
        manifest = data["videoProfiles"][0]["streams"][0]["uri"]
        token = data["videoProfiles"][0]["streams"][0]["token"]
        subtitle = next(
            (x["url"] for x in data["subtitlesAssets"] if x["url"].endswith(".vtt")),
            None,
        )

        return manifest, token, subtitle, data

    def web_playlist(self, video_id: str) -> tuple:
        """Request the WEB (pc) playlist. Returns (manifest, token, subtitle, data) or None."""
        url = self.config["web"]["vod"].format(programmeId=video_id)
        r = self.session.get(url, headers=self.config["headers"])
        if not r.ok:
            self.log.warning("Request for WEB endpoint returned %s", r)
            return None

        data = json.loads(r.content)

        manifest = None
        token = None
        for item in data["videoProfiles"]:
            if item["name"] == "dashwv-dyn-stream-1":
                token = item["streams"][0]["token"]
                manifest = item["streams"][0]["uri"]

        # The profile may be absent; previously this fell through with
        # manifest/token unbound and raised NameError on return.
        if manifest is None or token is None:
            self.log.warning("WEB endpoint did not include a Widevine DASH profile")
            return None

        subtitle = next(
            (x["url"] for x in data["subtitlesAssets"] if x["url"].endswith(".vtt")),
            None,
        )

        return manifest, token, subtitle, data

    def decrypt_token(self, token: str, client: str) -> str:
        """
        AES-CBC decrypt a playlist license token for the given client.

        The decrypted payload is pipe-delimited; the license token is field [1].
        Raises ValueError for an unknown client.
        """
        if client == "ANDROID":
            key = self.config["android"]["key"]
            iv = self.config["android"]["iv"]
        elif client == "WEB":
            key = self.config["web"]["key"]
            iv = self.config["web"]["iv"]
        else:
            # Previously an unknown client left key/iv unbound (NameError).
            raise ValueError(f"Unknown token client: {client}")

        if isinstance(token, str):
            token = base64.b64decode(token)
        cipher = AES.new(
            key=base64.b64decode(key),
            iv=base64.b64decode(iv),
            mode=AES.MODE_CBC,
        )
        data = unpad(cipher.decrypt(token), AES.block_size)
        dec_token = data.decode().split("|")[1]
        return dec_token.strip()

    def get_html(self, url: str) -> dict:
        """Scrape the episode page and return the embedded __PARAMS__ initialData dict."""
        r = self.session.get(url=url, headers=self.config["headers"])
        r.raise_for_status()

        init_data = re.search(
            "<script>window.__PARAMS__ = (.*)</script>",
            # Normalize the page so the embedded JS object parses as JSON.
            "".join(r.content.decode().replace("\u200c", "").replace("\r\n", "").replace("undefined", "null")),
        )
        try:
            data = json.loads(init_data.group(1))
            return data["initialData"]
        except Exception:
            self.log.error(f"Failed to get episode for {url}")
            sys.exit(1)
|
||||||
27
services/ALL4/config.yaml
Normal file
27
services/ALL4/config.yaml
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
headers:
  Accept-Language: en-US,en;q=0.8
  User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.75 Safari/537.36

endpoints:
  login: https://api.channel4.com/online/v2/auth/token
  title: https://api.channel4.com/online/v1/views/content-hubs/{title}.json
  license: https://c4.eme.lp.aws.redbeemedia.com/wvlicenceproxy-service/widevine/acquire
  search: https://all4nav.channel4.com/v1/api/search

android:
  key: QVlESUQ4U0RGQlA0TThESA==
  iv: MURDRDAzODNES0RGU0w4Mg==
  auth: MzZVVUN0OThWTVF2QkFnUTI3QXU4ekdIbDMxTjlMUTE6Sllzd3lIdkdlNjJWbGlrVw==
  vod: https://api.channel4.com/online/v1/vod/stream/{video_id}?client=android-mod

web:
  key: bjljTGllWWtxd3pOQ3F2aQ==
  iv: b2R6Y1UzV2RVaVhMdWNWZA==
  vod: https://www.channel4.com/vod/stream/{programmeId}

device:
  platform_name: android
  device_type: mobile
  device_name: "Sony C6903 (C6903)"
  app_version: "android_app:9.4.2"
  optimizely_datafile: "2908"
548
services/AMZN/__init__.py
Normal file
548
services/AMZN/__init__.py
Normal file
@ -0,0 +1,548 @@
|
|||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections import defaultdict
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional
|
||||||
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
|
||||||
|
import click
|
||||||
|
from langcodes import Language
|
||||||
|
from pywidevine.device import DeviceTypes
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Subtitle, Tracks, Track, Video
|
||||||
|
from devine.core.utilities import is_close_match
|
||||||
|
from devine.core.utils.collections import as_list
|
||||||
|
|
||||||
|
|
||||||
|
class AMZN(Service):
|
||||||
|
"""
|
||||||
|
\b
|
||||||
|
Service code for Amazon Prime Video (https://primevideo.com).
|
||||||
|
Based on original code for VT, credit to original author.
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Cookies
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L1: 2160p
|
||||||
|
L3 Chrome: 720p, 1080p
|
||||||
|
L3 Android: 540p
|
||||||
|
PlayReady:
|
||||||
|
SL3: 2160p
|
||||||
|
SL2: 1080p
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input should be asin only grabbed from URL or other means, e.g.:
|
||||||
|
B0B8KZPQBX OR amzn1.dv.gti.7aa9f19e-9c00-40e3-98e7-b365678492dd
|
||||||
|
- Use the --lang LANG_RANGE option to request non-english tracks
|
||||||
|
- Use --bitrate CBR to request Constant Bitrate:
|
||||||
|
devine dl -w s01e01 AMZN -b CBR B0B8KZPQBX
|
||||||
|
- Use --quality SD to request SD tracks (default is HD):
|
||||||
|
devine dl -w s01e01 AMZN -q SD B0B8KZPQBX
|
||||||
|
|
||||||
|
\b
|
||||||
|
Notes:
|
||||||
|
- Written specifically for use with ChromeCDM, with Android L3 as fallback.
|
||||||
|
- Region is chosen automatically based on domain extension found in cookies.
|
||||||
|
- Loading tracks could take a few seconds if the title has many audio tracks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# GEOFENCE = ("",)
|
||||||
|
ALIASES = ("amazon", "prime")
|
||||||
|
TITLE_RE = r"^(?:https?://(?:www\.)?(?P<domain>amazon\.(?P<region>com|co\.uk|de|co\.jp)|primevideo\.com)(?:/.+)?/)?(?P<id>[A-Z0-9]{10,}|amzn1\.dv\.gti\.[a-f0-9-]+)" # noqa: E501
|
||||||
|
|
||||||
|
AUDIO_CODEC_MAP = {"AAC": "mp4a", "EC3": "ec-3"}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@click.command(name="AMZN", short_help="https://primevideo.com")
|
||||||
|
@click.argument("title", type=str, required=False)
|
||||||
|
@click.option(
|
||||||
|
"-b",
|
||||||
|
"--bitrate",
|
||||||
|
default="VBR+CBR",
|
||||||
|
type=click.Choice(["VBR", "CBR", "VBR+CBR"], case_sensitive=False),
|
||||||
|
help="Video Bitrate Mode to download in. VBR=Variable Bitrate, CBR=Constant Bitrate.",
|
||||||
|
)
|
||||||
|
# UHD, HD, SD. UHD only returns HEVC, ever, even for <=HD only content
|
||||||
|
@click.option(
|
||||||
|
"-q",
|
||||||
|
"--quality",
|
||||||
|
default="HD",
|
||||||
|
type=click.Choice(["SD", "HD", "UHD"], case_sensitive=False),
|
||||||
|
help="Manifest quality to request.",
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"-am",
|
||||||
|
"--audio-manifest",
|
||||||
|
default=None,
|
||||||
|
type=click.Choice(["VBR", "CBR", "H265"], case_sensitive=False),
|
||||||
|
help="Manifest to use for audio. Defaults to H265 if the video manifest is missing 640k audio.",
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"-aq",
|
||||||
|
"--audio-quality",
|
||||||
|
default="SD",
|
||||||
|
type=click.Choice(["SD", "HD", "UHD"], case_sensitive=False),
|
||||||
|
help="Manifest quality to request for audio. Defaults to the same as --quality.",
|
||||||
|
)
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx, **kwargs):
|
||||||
|
return AMZN(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title, bitrate, quality, audio_manifest, audio_quality):
|
||||||
|
m = self.parse_title(ctx, title)
|
||||||
|
self.domain = m.get("domain")
|
||||||
|
self.domain_region = m.get("region")
|
||||||
|
super().__init__(ctx)
|
||||||
|
self.bitrate = bitrate
|
||||||
|
self.quality = quality
|
||||||
|
self.audio_manifest = audio_manifest
|
||||||
|
self.audio_quality = audio_quality
|
||||||
|
|
||||||
|
self.vcodec = "H265" if ctx.parent.params.get("vcodec") == Video.Codec.HEVC else "H264"
|
||||||
|
self.acodec = ctx.parent.params.get("acodec")
|
||||||
|
|
||||||
|
self.cdm = ctx.obj.cdm
|
||||||
|
self.region = {}
|
||||||
|
self.endpoints = {}
|
||||||
|
self.device = {}
|
||||||
|
|
||||||
|
self.pv = self.domain == "primevideo.com"
|
||||||
|
self.device_token = None
|
||||||
|
self.device_id = None
|
||||||
|
self.customer_id = None
|
||||||
|
self.client_id = "f22dbddb-ef2c-48c5-8876-bed0d47594fd"
|
||||||
|
|
||||||
|
if self.cdm.device_type != DeviceTypes.CHROME:
|
||||||
|
self.log.info("Setting manifest quality to SD for Android L3 (use -q HD to override)")
|
||||||
|
self.quality = "SD"
|
||||||
|
|
||||||
|
# Abstracted functions
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
|
||||||
|
super().authenticate(cookies, credential)
|
||||||
|
if not cookies:
|
||||||
|
raise EnvironmentError("Service requires Cookies for Authentication.")
|
||||||
|
|
||||||
|
self.session.cookies.update(cookies)
|
||||||
|
self.configure()
|
||||||
|
|
||||||
|
def get_titles(self) -> Titles_T:
|
||||||
|
res = self.session.get(
|
||||||
|
url=self.endpoints["detail"],
|
||||||
|
params={"titleID": self.title, "isElcano": "1", "sections": "Atf"},
|
||||||
|
headers={"Accept": "application/json"},
|
||||||
|
).json()["widgets"]
|
||||||
|
|
||||||
|
entity = res["header"]["detail"].get("entityType")
|
||||||
|
if not entity:
|
||||||
|
self.log.error(" - Failed to get entity type")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if entity == "Movie":
|
||||||
|
metadata = res["header"]["detail"]
|
||||||
|
return Movies(
|
||||||
|
[
|
||||||
|
Movie(
|
||||||
|
id_=metadata.get("catalogId"),
|
||||||
|
year=metadata.get("releaseYear"),
|
||||||
|
name=metadata.get("title"),
|
||||||
|
service=self.__class__,
|
||||||
|
data=metadata,
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
elif entity == "TV Show":
|
||||||
|
seasons = [x.get("titleID") for x in res["seasonSelector"]]
|
||||||
|
|
||||||
|
episodes = []
|
||||||
|
for season in seasons:
|
||||||
|
res = self.session.get(
|
||||||
|
url=self.endpoints["detail"],
|
||||||
|
params={"titleID": season, "isElcano": "1", "sections": "Btf"},
|
||||||
|
headers={"Accept": "application/json"},
|
||||||
|
).json()["widgets"]
|
||||||
|
|
||||||
|
# cards = [x["detail"] for x in as_list(res["titleContent"][0]["cards"])]
|
||||||
|
cards = [
|
||||||
|
{**x["detail"], "sequenceNumber": x["self"]["sequenceNumber"]}
|
||||||
|
for x in res["episodeList"]["episodes"]
|
||||||
|
]
|
||||||
|
|
||||||
|
product_details = res["productDetails"]["detail"]
|
||||||
|
|
||||||
|
episodes.extend(
|
||||||
|
Episode(
|
||||||
|
id_=title.get("titleId") or title["catalogId"],
|
||||||
|
title=product_details.get("parentTitle") or product_details["title"],
|
||||||
|
year=title.get("releaseYear") or product_details.get("releaseYear"),
|
||||||
|
season=product_details.get("seasonNumber"),
|
||||||
|
number=title.get("sequenceNumber"),
|
||||||
|
name=title.get("title"),
|
||||||
|
service=self.__class__,
|
||||||
|
data=title,
|
||||||
|
)
|
||||||
|
for title in cards
|
||||||
|
if title["entityType"] == "TV Show"
|
||||||
|
)
|
||||||
|
|
||||||
|
return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
|
||||||
|
manifest = self.get_manifest(title, video_codec=self.vcodec, bitrate_mode=self.bitrate, quality=self.quality)
|
||||||
|
|
||||||
|
if "rightsException" in manifest["returnedTitleRendition"]["selectedEntitlement"]:
|
||||||
|
self.log.error(" - The profile used does not have the rights to this title.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
self.customer_id = manifest["returnedTitleRendition"]["selectedEntitlement"]["grantedByCustomerId"]
|
||||||
|
|
||||||
|
chosen_manifest = self.choose_manifest(manifest)
|
||||||
|
mpd_url = self.clean_mpd_url(chosen_manifest["avUrlInfoList"][0]["url"])
|
||||||
|
|
||||||
|
tracks = DASH.from_url(url=mpd_url, session=self.session).to_tracks(
|
||||||
|
language=re.sub(r"_dialog.*$", "", manifest["playbackUrls"]["defaultAudioTrackId"])
|
||||||
|
)
|
||||||
|
tracks.videos[0].data["timecodes"] = manifest.get("transitionTimecodes")
|
||||||
|
|
||||||
|
audios = defaultdict(list)
|
||||||
|
for audio in tracks.audio:
|
||||||
|
audios[audio.language].append(audio)
|
||||||
|
|
||||||
|
need_separate_audio = False
|
||||||
|
for lang in audios:
|
||||||
|
if not any((x.bitrate or 0) >= 640000 for x in audios[lang]):
|
||||||
|
need_separate_audio = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if need_separate_audio:
|
||||||
|
manifest_type = self.audio_manifest or "H265"
|
||||||
|
self.log.info(f"Getting audio from {manifest_type} manifest for potential higher bitrate or better codec")
|
||||||
|
audio_manifest = self.get_manifest(
|
||||||
|
title,
|
||||||
|
"H265" if manifest_type == "H265" else "H264",
|
||||||
|
"VBR" if manifest_type != "CBR" else "CBR",
|
||||||
|
self.audio_quality or self.quality,
|
||||||
|
)
|
||||||
|
audio_mpd_url = self.clean_mpd_url(self.choose_manifest(audio_manifest)["avUrlInfoList"][0]["url"])
|
||||||
|
self.log.debug(audio_mpd_url)
|
||||||
|
|
||||||
|
try:
|
||||||
|
audio_mpd = DASH.from_url(url=audio_mpd_url, session=self.session).to_tracks(language="en")
|
||||||
|
except KeyError:
|
||||||
|
self.log.warning(f" - Title has no {self.audio_manifest} stream, cannot get higher quality audio")
|
||||||
|
else:
|
||||||
|
tracks.audio = audio_mpd.audio
|
||||||
|
|
||||||
|
for audio in tracks.audio:
|
||||||
|
# Amazon @lang is just the lang code, no dialect, @audioTrackId has it.
|
||||||
|
audio_track_id = audio.data["dash"]["adaptation_set"].get("audioTrackId")
|
||||||
|
sub_type = audio.data["dash"]["adaptation_set"].get("audioTrackSubtype")
|
||||||
|
if audio_track_id is not None:
|
||||||
|
audio.language = Language.get(audio_track_id.split("_")[0]) # e.g. es-419_ec3_blabla
|
||||||
|
if sub_type is not None and "descriptive" in sub_type.lower():
|
||||||
|
audio.descriptive = True
|
||||||
|
|
||||||
|
for track in tracks:
|
||||||
|
rep_base = track.data["dash"]["representation"].find("BaseURL")
|
||||||
|
if rep_base is not None:
|
||||||
|
base_url = os.path.dirname(track.url)
|
||||||
|
track_base = rep_base.text
|
||||||
|
track.url = f"{base_url}/{track_base}"
|
||||||
|
track.descriptor = Track.Descriptor.URL
|
||||||
|
track.data["dash"].clear()
|
||||||
|
|
||||||
|
# filter out boosted, descriptive, and lowest bitrate audio tracks
|
||||||
|
# tracks.audio = [
|
||||||
|
# audio
|
||||||
|
# for audio in tracks.audio
|
||||||
|
# if audio.data["dash"]["adaptation_set"].get("audioTrackSubtype", "").lower() == "dialog"
|
||||||
|
# and int(audio.data["dash"]["adaptation_set"].get("maxBandwidth", 0)) >= 192000
|
||||||
|
# ]
|
||||||
|
|
||||||
|
for sub in manifest.get("subtitleUrls", []) + manifest.get("forcedNarratives", []):
|
||||||
|
tracks.add(
|
||||||
|
Subtitle(
|
||||||
|
id_=sub.get(
|
||||||
|
"timedTextTrackId", f"{sub['languageCode']}_{sub['type']}_{sub['subtype']}_{sub['index']}"
|
||||||
|
),
|
||||||
|
url=os.path.splitext(sub["url"])[0] + ".srt", # DFXP -> SRT forcefully seems to work fine
|
||||||
|
codec=Subtitle.Codec.from_codecs("srt"), # sub["format"].lower(),
|
||||||
|
language=sub["languageCode"],
|
||||||
|
forced="forced" in sub["displayName"],
|
||||||
|
sdh=sub["type"].lower() == "sdh", # TODO: what other sub types? cc? forced?
|
||||||
|
),
|
||||||
|
warn_only=True,
|
||||||
|
) # expecting possible dupes, ignore
|
||||||
|
|
||||||
|
return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Title_T) -> Chapters:
|
||||||
|
timecodes = title.tracks.videos[0].data.get("timecodes")
|
||||||
|
if not timecodes:
|
||||||
|
return Chapters()
|
||||||
|
|
||||||
|
elements = [x for x in timecodes.get("skipElements", [])]
|
||||||
|
|
||||||
|
chapters = [
|
||||||
|
Chapter(
|
||||||
|
name=x.get("elementType", "Chapter"),
|
||||||
|
timestamp=x.get("startTimecodeMs"),
|
||||||
|
)
|
||||||
|
for x in elements
|
||||||
|
]
|
||||||
|
|
||||||
|
if timecodes.get("endCreditsStart"):
|
||||||
|
chapters.append(
|
||||||
|
Chapter(
|
||||||
|
name="CREDITS",
|
||||||
|
timestamp=timecodes.get("endCreditsStart"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return chapters
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
|
||||||
|
return self.config["certificate"]
|
||||||
|
|
||||||
|
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track) -> None:
|
||||||
|
response = self.session.post(
|
||||||
|
url=self.endpoints["license"],
|
||||||
|
params={
|
||||||
|
"asin": title.id,
|
||||||
|
"consumptionType": "Streaming",
|
||||||
|
"desiredResources": "Widevine2License",
|
||||||
|
"deviceTypeID": self.device["device_type"],
|
||||||
|
"deviceID": self.device_id,
|
||||||
|
"firmware": 1,
|
||||||
|
"gascEnabled": str(self.pv).lower(),
|
||||||
|
"marketplaceID": self.region["marketplace_id"],
|
||||||
|
"resourceUsage": "ImmediateConsumption",
|
||||||
|
"videoMaterialType": "Feature",
|
||||||
|
"operatingSystemName": "Linux" if self.quality == "SD" else "Windows",
|
||||||
|
"operatingSystemVersion": "unknown" if self.quality == "SD" else "10.0",
|
||||||
|
"customerID": self.customer_id,
|
||||||
|
"deviceDrmOverride": "CENC",
|
||||||
|
"deviceStreamingTechnologyOverride": "DASH",
|
||||||
|
"deviceVideoQualityOverride": "HD",
|
||||||
|
"deviceHdrFormatsOverride": "None",
|
||||||
|
},
|
||||||
|
headers={
|
||||||
|
"Accept": "application/json",
|
||||||
|
"Content-Type": "application/x-www-form-urlencoded",
|
||||||
|
"Authorization": f"Bearer {self.device_token}",
|
||||||
|
},
|
||||||
|
data={
|
||||||
|
"widevine2Challenge": base64.b64encode(challenge).decode(),
|
||||||
|
"includeHdcpTestKeyInLicense": "false",
|
||||||
|
},
|
||||||
|
).json()
|
||||||
|
if "errorsByResource" in response:
|
||||||
|
error_code = response["errorsByResource"]["Widevine2License"]
|
||||||
|
if "errorCode" in error_code:
|
||||||
|
error_code = error_code["errorCode"]
|
||||||
|
elif "type" in error_code:
|
||||||
|
error_code = error_code["type"]
|
||||||
|
|
||||||
|
if error_code in ["PRS.NoRights.AnonymizerIP", "PRS.NoRights.NotOwned"]:
|
||||||
|
self.log.error("Proxy detected, Unable to License")
|
||||||
|
elif error_code == "PRS.Dependency.DRM.Widevine.UnsupportedCdmVersion":
|
||||||
|
self.log.error("Cdm version not supported")
|
||||||
|
else:
|
||||||
|
self.log.error(f" x Error from Amazon's License Server: [{error_code}]")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
return response["widevine2License"]["license"]
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
def configure(self):
    """
    One-time service setup: detect the account region, resolve all API
    endpoints against it, and derive the browser device identity.
    """
    # NOTE(review): long IDs on non-amazon domains are presumably PrimeVideo
    # titles (GTI ids) — confirm against the title-id formats upstream.
    if len(self.title) > 10 and not (self.domain or "").startswith("amazon."):
        self.pv = True

    self.log.info("Getting account region")
    self.region = self.get_region()
    if not self.region:
        self.log.error(" - Failed to get Amazon account region")
        sys.exit(1)
    # self.GEOFENCE.append(self.region["code"])
    self.log.info(f" + Region: {self.region['code'].upper()}")

    # endpoints must be prepared AFTER region data is retrieved
    self.endpoints = self.prepare_endpoints(self.config["endpoints"], self.region)

    self.session.headers.update({"Origin": f"https://{self.region['base']}"})

    # deterministic per-User-Agent device id, as the web player derives it
    self.device_id = hashlib.sha224(("CustomerID" + self.session.headers["User-Agent"]).encode("utf-8")).hexdigest()
    self.device = {"device_type": self.config["device_types"]["browser"]}
|
||||||
|
|
||||||
|
def get_region(self):
    """
    Resolve the account's region config (base domains, marketplace id).

    Returns an empty dict when the region cannot be determined from the
    cookie/argument domain. For PrimeVideo accounts the manifest host is
    additionally scraped from the primevideo.com homepage.
    """
    domain_region = self.get_domain_region()
    if not domain_region:
        return {}

    region = self.config["regions"].get(domain_region)
    if not region:
        raise self.log.exit(f" - There's no region configuration data for the region: {domain_region}")

    region["code"] = domain_region

    if self.pv:
        # scrape the fls telemetry host to learn the PV shard (na/eu/fe)
        res = self.session.get("https://www.primevideo.com").text
        match = re.search(r'ue_furl *= *([\'"])fls-(na|eu|fe)\.amazon\.[a-z.]+\1', res)
        if match:
            pv_region = match.group(2).lower()
        else:
            raise self.log.exit(" - Failed to get PrimeVideo region")
        # "na" maps to the bare atv-ps host; other shards are suffixed
        pv_region = {"na": "atv-ps"}.get(pv_region, f"atv-ps-{pv_region}")
        region["base_manifest"] = f"{pv_region}.primevideo.com"
        region["base"] = "www.primevideo.com"

    return region
|
||||||
|
|
||||||
|
def get_domain_region(self):
    """Get the region of the cookies from the domain."""
    tld = (self.domain_region or "").split(".")[-1]
    if not tld:
        # fall back to the first amazon/primevideo cookie domain's TLD
        tld = next(
            (
                cookie.domain.split(".")[-1]
                for cookie in self.session.cookies
                if cookie.domain_specified and cookie.domain.startswith((".amazon.", ".primevideo."))
            ),
            None,
        )
    # normalize TLDs that don't match their ISO region code
    overrides = {"com": "us", "uk": "gb"}
    return overrides.get(tld, tld)
|
||||||
|
|
||||||
|
def prepare_endpoint(self, name, uri, region):
    """Build the absolute URL for a named API endpoint within a region."""
    if name in ("browse", "playback", "license", "xray"):
        host = region["base_manifest"]
    elif name in ("detail", "ontv", "devicelink"):
        host = region["base"]
    elif name in ("codepair", "register", "token"):
        # auth endpoints always go through the US api host
        host = self.config["regions"]["us"]["base_api"]
    else:
        raise ValueError(f"Unknown endpoint: {name}")
    return f"https://{host}{uri}"
|
||||||
|
|
||||||
|
def prepare_endpoints(self, endpoints, region):
    """Resolve every configured endpoint URI into an absolute regional URL."""
    resolved = {}
    for name, uri in endpoints.items():
        resolved[name] = self.prepare_endpoint(name, uri, region)
    return resolved
|
||||||
|
|
||||||
|
def choose_manifest(self, manifest):
    """Get manifest URL set for the title based on CDN weight (lowest rank wins)."""
    # min() with a key avoids sorting the whole list just to take its head;
    # like sorted()[0] it returns the first of tied candidates
    return min(manifest["audioVideoUrls"]["avCdnUrlSets"], key=lambda cdn: int(cdn["cdnWeightsRank"]))
|
||||||
|
|
||||||
|
def get_manifest(self, title, video_codec, bitrate_mode, quality, hdr=None):
    """
    Fetch the full playback-resources manifest for a title.

    Requests every resource the web player asks for (urls, metadata,
    subtitles, xray, timecodes) in one call. `hdr` is accepted for interface
    parity but is not used here (HDR is forced off via the override below).
    Exits the process when Amazon returns non-JSON or an error payload.
    """
    r = self.session.get(
        url=self.endpoints["playback"],
        params={
            "asin": title.id,
            "consumptionType": "Streaming",
            "desiredResources": ",".join(
                [
                    "PlaybackUrls",
                    "AudioVideoUrls",
                    "CatalogMetadata",
                    "ForcedNarratives",
                    "SubtitlePresets",
                    "SubtitleUrls",
                    "TransitionTimecodes",
                    "TrickplayUrls",
                    "CuepointPlaylist",
                    "XRayMetadata",
                    "PlaybackSettings",
                ]
            ),
            # device identity
            "deviceID": self.device_id,
            "deviceTypeID": self.device["device_type"],
            "firmware": 1,
            "gascEnabled": str(self.pv).lower(),
            "marketplaceID": self.region["marketplace_id"],
            "resourceUsage": "CacheResources",
            "videoMaterialType": "Feature",
            "playerType": "html5",
            "clientId": self.client_id,
            # SD is requested as a Linux client, HD as Windows 10
            "operatingSystemName": "Linux" if quality == "SD" else "Windows",
            "operatingSystemVersion": "unknown" if quality == "SD" else "10.0",
            # stream/codec overrides
            "deviceDrmOverride": "CENC",
            "deviceStreamingTechnologyOverride": "DASH",
            "deviceProtocolOverride": "Https",
            "deviceVideoCodecOverride": video_codec,
            # e.g. "CBR+CVBR" -> "CBR,CVBR"; plain "VBR" is sent as "CVBR"
            "deviceBitrateAdaptationsOverride": bitrate_mode.replace("VBR", "CVBR").replace("+", ","),
            "deviceVideoQualityOverride": "HD",
            "deviceHdrFormatsOverride": "None",
            "supportedDRMKeyScheme": "DUAL_KEY",
            "liveManifestType": "live,accumulating",
            "titleDecorationScheme": "primary-content",
            "subtitleFormat": "TTMLv2",
            "languageFeature": "MLFv2",
            "uxLocale": "en_US",
            # xray (trivia/cast overlay) metadata
            "xrayDeviceClass": "normal",
            "xrayPlaybackMode": "playback",
            "xrayToken": "XRAY_WEB_2020_V1",
            "playbackSettingsFormatVersion": "1.0.0",
            "playerAttributes": json.dumps({"frameRate": "HFR"}),
            "audioTrackId": "all",
        },
        headers={"Authorization": f"Bearer {self.device_token}"},
    )
    try:
        manifest = r.json()
    except json.JSONDecodeError:
        # dump the raw body for debugging before bailing out
        self.log.debug(r.text)
        self.log.error(" - Amazon didn't return JSON data when obtaining the playback manifest.")
        sys.exit(1)
    if "error" in manifest:
        self.log.error(" - Amazon reported an error when obtaining the playback manifest.")
        self.log.error(manifest["error"])
        sys.exit(1)
    return manifest
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_original_language(manifest):
|
||||||
|
"""Get a title's original language from manifest data."""
|
||||||
|
try:
|
||||||
|
return next(
|
||||||
|
x["language"].replace("_", "-")
|
||||||
|
for x in manifest["catalogMetadata"]["playback"]["audioTracks"]
|
||||||
|
if x["isOriginalLanguage"]
|
||||||
|
)
|
||||||
|
except (KeyError, StopIteration):
|
||||||
|
pass
|
||||||
|
|
||||||
|
if "defaultAudioTrackId" in manifest.get("playbackUrls", {}):
|
||||||
|
try:
|
||||||
|
return manifest["playbackUrls"]["defaultAudioTrackId"].split("_")[0]
|
||||||
|
except IndexError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def clean_mpd_url(mpd_url):
|
||||||
|
"""Clean up an Amazon MPD manifest url."""
|
||||||
|
try:
|
||||||
|
parsed_url = urlparse(mpd_url)
|
||||||
|
new_path = "/".join(
|
||||||
|
segment for segment in parsed_url.path.split("/") if not any(sub in segment for sub in ["$", "dm"])
|
||||||
|
)
|
||||||
|
return urlunparse(parsed_url._replace(path=new_path))
|
||||||
|
except Exception as e:
|
||||||
|
raise ValueError(f"Unable to parse MPD URL: {e}")
|
||||||
|
|
||||||
|
def parse_title(self, ctx, title):
    """
    Resolve the title ID from the CLI argument (or the parent click context)
    and, when the service defines TITLE_RE pattern(s), extract the "id" group
    plus any other named groups from it.

    Sets self.title and returns the matched group dict ({} when no pattern
    is defined or none matched).
    """
    title = title or ctx.parent.params.get("title")
    if not title:
        self.log.exit(" - No title ID specified")
    # default=None: services without a TITLE_RE attribute must hit this
    # guard, not crash with AttributeError
    if not getattr(self, "TITLE_RE", None):
        self.title = title
        return {}
    for regex in as_list(self.TITLE_RE):
        match = re.search(regex, title)
        if match:
            self.title = match.group("id")
            return match.groupdict()
    self.log.warning(f" - Unable to parse title ID {title!r}, using as-is")
    self.title = title
    return {}  # keep the return type consistent with the matched path
|
||||||
62
services/AMZN/config.yaml
Normal file
62
services/AMZN/config.yaml
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
certificate: |
|
||||||
|
CAUSwgUKvAIIAxIQCuQRtZRasVgFt7DIvVtVHBi17OSpBSKOAjCCAQoCggEBAKU2UrYVOSDlcXajWhpEgGhqGraJtFdUPgu6plJGy9ViaRn5mhyXON5PXm
|
||||||
|
w1krQdi0SLxf00FfIgnYFLpDfvNeItGn9rcx0RNPwP39PW7aW0Fbqi6VCaKWlR24kRpd7NQ4woyMXr7xlBWPwPNxK4xmR/6UuvKyYWEkroyeIjWHAqgCjC
|
||||||
|
mpfIpVcPsyrnMuPFGl82MMVnAhTweTKnEPOqJpxQ1bdQvVNCvkba5gjOTbEnJ7aXegwhmCdRQzXjTeEV2dO8oo5YfxW6pRBovzF6wYBMQYpSCJIA24ptAP
|
||||||
|
/2TkneyJuqm4hJNFvtF8fsBgTQQ4TIhnX4bZ9imuhivYLa6HsCAwEAAToPYW1hem9uLmNvbS1wcm9kEoADETQD6R0H/h9fyg0Hw7mj0M7T4s0bcBf4fMhA
|
||||||
|
Rpwk2X4HpvB49bJ5Yvc4t41mAnXGe/wiXbzsddKMiMffkSE1QWK1CFPBgziU23y1PjQToGiIv/sJIFRKRJ4qMBxIl95xlvSEzKdt68n7wqGa442+uAgk7C
|
||||||
|
XU3uTfVofYY76CrPBnEKQfad/CVqTh48geNTb4qRH1TX30NzCsB9NWlcdvg10pCnWSm8cSHu1d9yH+2yQgsGe52QoHHCqHNzG/wAxMYWTevXQW7EPTBeFy
|
||||||
|
SPY0xUN+2F2FhCf5/A7uFUHywd0zNTswh0QJc93LBTh46clRLO+d4RKBiBSj3rah6Y5iXMw9N9o58tCRc9gFHrjfMNubopWHjDOO3ATUgqXrTp+fKVCmsG
|
||||||
|
uGl1ComHxXV9i1AqHwzzY2JY2vFqo73jR3IElr6oChPIwcNokmNc0D4TXtjE0BoYkbWKJfHvJJihzMOvDicWUsemVHvua9/FBtpbHgpbgwijFPjtQF9Ldb
|
||||||
|
8Swf
|
||||||
|
|
||||||
|
device_types:
|
||||||
|
browser: 'AOAGZA014O5RE'
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
detail: /gp/video/api/getDetailPage
|
||||||
|
browse: /cdp/catalog/Browse
|
||||||
|
playback: /cdp/catalog/GetPlaybackResources
|
||||||
|
license: /cdp/catalog/GetPlaybackResources
|
||||||
|
xray: /swift/page/xray
|
||||||
|
ontv: /gp/video/ontv/code
|
||||||
|
devicelink: /gp/video/api/codeBasedLinking
|
||||||
|
codepair: /auth/create/codepair
|
||||||
|
register: /auth/register
|
||||||
|
token: /auth/token
|
||||||
|
|
||||||
|
regions:
|
||||||
|
us:
|
||||||
|
base: 'www.amazon.com'
|
||||||
|
base_api: 'api.amazon.com'
|
||||||
|
base_manifest: 'atv-ps.amazon.com'
|
||||||
|
marketplace_id: 'ATVPDKIKX0DER'
|
||||||
|
|
||||||
|
gb:
|
||||||
|
base: 'www.amazon.co.uk'
|
||||||
|
base_api: 'api.amazon.co.uk'
|
||||||
|
base_manifest: 'atv-ps-eu.amazon.co.uk'
|
||||||
|
marketplace_id: 'A2IR4J4NTCP2M5' # A1F83G8C2ARO7P is also another marketplace_id
|
||||||
|
|
||||||
|
it:
|
||||||
|
base: 'www.amazon.it'
|
||||||
|
base_api: 'api.amazon.it'
|
||||||
|
base_manifest: 'atv-ps-eu.primevideo.com'
|
||||||
|
marketplace_id: 'A3K6Y4MI8GDYMT'
|
||||||
|
|
||||||
|
de:
|
||||||
|
base: 'www.amazon.de'
|
||||||
|
base_api: 'api.amazon.de'
|
||||||
|
base_manifest: 'atv-ps-eu.amazon.de'
|
||||||
|
marketplace_id: 'A1PA6795UKMFR9'
|
||||||
|
|
||||||
|
au:
|
||||||
|
base: 'www.amazon.com.au'
|
||||||
|
base_api: 'api.amazon.com.au'
|
||||||
|
base_manifest: 'atv-ps-fe.amazon.com.au'
|
||||||
|
marketplace_id: 'A3K6Y4MI8GDYMT'
|
||||||
|
|
||||||
|
jp:
|
||||||
|
base: 'www.amazon.co.jp'
|
||||||
|
base_api: 'api.amazon.co.jp'
|
||||||
|
base_manifest: 'atv-ps-fe.amazon.co.jp'
|
||||||
|
marketplace_id: 'A1VC38T7YXB528'
|
||||||
250
services/AUBC/__init__.py
Normal file
250
services/AUBC/__init__.py
Normal file
@ -0,0 +1,250 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from collections.abc import Generator
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.manifests.dash import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Subtitle, Tracks
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
|
||||||
|
class AUBC(Service):
|
||||||
|
"""
|
||||||
|
\b
|
||||||
|
Service code for ABC iView streaming service (https://iview.abc.net.au/).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: None
|
||||||
|
Robustness:
|
||||||
|
L3: 1080p, AAC2.0
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input should be complete URL:
|
||||||
|
SHOW: https://iview.abc.net.au/show/return-to-paradise
|
||||||
|
EPISODE: https://iview.abc.net.au/video/DR2314H001S00
|
||||||
|
MOVIE: https://iview.abc.net.au/show/way-back / https://iview.abc.net.au/show/way-back/video/ZW3981A001S00
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
GEOFENCE = ("au",)
|
||||||
|
ALIASES = ("iview", "abciview", "iv",)
|
||||||
|
|
||||||
|
# CLI entry point: click constructs the service from the parsed arguments.
# @staticmethod must wrap the click command so the class attribute is not
# bound as an instance method.
@staticmethod
@click.command(name="AUBC", short_help="https://iview.abc.net.au/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> AUBC:
    return AUBC(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title: str):
    """Store the raw title input (URL or slug) and apply configured headers."""
    # parsed later in get_titles(); must be set before Service.__init__
    self.title = title
    super().__init__(ctx)

    # default request headers (User-Agent etc.) from config.yaml
    self.session.headers.update(self.config["headers"])
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """
    Search iview's public Algolia index for programs matching self.title.

    The API key/application id below are the site's public client-side
    Algolia credentials, not secrets.
    """
    url = (
        "https://y63q32nvdl-1.algolianet.com/1/indexes/*/queries?x-algolia-agent=Algolia"
        "%20for%20JavaScript%20(4.9.1)%3B%20Browser%20(lite)%3B%20react%20(17.0.2)%3B%20"
        "react-instantsearch%20(6.30.2)%3B%20JS%20Helper%20(3.10.0)&x-"
        "algolia-api-key=bcdf11ba901b780dc3c0a3ca677fbefc&x-algolia-application-id=Y63Q32NVDL"
    )
    payload = {
        "requests": [
            {
                "indexName": "ABC_production_iview_web",
                "params": f"query={self.title}&tagFilters=&userToken=anonymous-74be3cf1-1dc7-4fa1-9cff-19592162db1c",
            }
        ],
    }

    results = self._request("POST", url, payload=payload)["results"]
    # only surface shows ("Program" docs), not individual clips/videos
    hits = [x for x in results[0]["hits"] if x["docType"] == "Program"]

    for result in hits:
        yield SearchResult(
            id_="https://iview.abc.net.au/show/{}".format(result.get("slug")),
            title=result.get("title"),
            description=result.get("synopsis"),
            label=result.get("subType"),
            url="https://iview.abc.net.au/show/{}".format(result.get("slug")),
        )
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """
    Parse the input URL/slug and fetch the matching title listing.

    "show" inputs resolve to a Series (series/program) or Movies
    (feature/movie) depending on the show's type; "video" inputs resolve
    to a single-episode Series.

    Raises ValueError when the input can't be parsed or the content type
    is not recognized (previously this silently returned None).
    """
    title_re = r"^(?:https?://(?:www.)?iview.abc.net.au/(?P<type>show|video)/)?(?P<id>[a-zA-Z0-9_-]+)"
    try:
        kind, title_id = (re.match(title_re, self.title).group(i) for i in ("type", "id"))
    except Exception:
        raise ValueError("- Could not parse ID from title")

    if kind == "show":
        data = self._request("GET", "/v3/show/{}".format(title_id))
        # guard against a missing "type" field instead of crashing on .lower()
        label = (data.get("type") or "").lower()

        if label in ("series", "program"):
            episodes = self._series(title_id)
            return Series(episodes)

        if label in ("feature", "movie"):
            movie = self._movie(data)
            return Movies(movie)

    elif kind == "video":
        episode = self._episode(title_id)
        return Series([episode])

    # explicit failure instead of an implicit None return
    raise ValueError(f"- Unsupported or unrecognized title: {self.title}")
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """
    Fetch the title's playlist and build video/audio/subtitle tracks from
    its DASH manifest.

    Raises ConnectionError when the title is not playable or has no
    playlist. Also records whether the stream is DRM-protected on
    title.data for get_widevine_license().
    """
    video = self._request("GET", "/v3/video/{}".format(title.id))
    if not video.get("playable"):
        raise ConnectionError(video.get("unavailableMessage"))

    playlist = video.get("_embedded", {}).get("playlist", {})
    if not playlist:
        raise ConnectionError("Could not find a playlist for this title")

    streams = next(x["streams"]["mpegdash"] for x in playlist if x["type"] == "program")
    captions = next((x.get("captions") for x in playlist if x["type"] == "program"), None)
    # the 720p manifest URL rewritten to 1080 serves the 1080p rendition;
    # fall back to the SD manifest otherwise
    manifest = streams["720"].replace("720", "1080") if streams.get("720") else streams["sd"]
    title.data["protected"] = True if streams.get("protected") else False

    tracks = DASH.from_url(manifest, self.session).to_tracks(title.language)

    # mark audio-description style tracks via the DASH Role element
    for track in tracks.audio:
        role = track.data["dash"]["adaptation_set"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    if captions:
        subtitles = captions.get("src-vtt")
        tracks.add(
            Subtitle(
                # short stable id derived from the subtitle URL
                id_=hashlib.md5(subtitles.encode()).hexdigest()[0:6],
                url=subtitles,
                codec=Subtitle.Codec.from_mime(subtitles[-3:]),
                language=title.language,
                forced=False,
            )
        )

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Return an end-credits chapter when the title exposes a matching cue point."""
    cue_points = title.data.get("cuePoints")
    if not cue_points:
        return Chapters()

    credits_start = next((cue.get("start") for cue in cue_points if cue["type"] == "end-credits"), None)
    if credits_start:
        # cue start appears to be seconds; Chapter takes milliseconds — TODO confirm
        return Chapters([Chapter(name="Credits", timestamp=credits_start * 1000)])

    return Chapters()
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, *, challenge: bytes, title: Union[Movies, Series], track: AnyTrack) -> Optional[Union[bytes, str]]:
    """POST the CDM challenge to the keyos license server with the title's customdata token."""
    # clear (non-DRM) titles were flagged in get_tracks(); nothing to license
    if not title.data.get("protected"):
        return None

    headers = {"customdata": self._license(title.id)}
    response = self.session.post(self.config["endpoints"]["license"], headers=headers, data=challenge)
    response.raise_for_status()
    return response.content
|
||||||
|
|
||||||
|
# Service specific
|
||||||
|
|
||||||
|
def _series(self, title: str) -> Episode:
|
||||||
|
data = self._request("GET", "/v3/series/{}".format(title))
|
||||||
|
|
||||||
|
episodes = [
|
||||||
|
self.create_episode(episode)
|
||||||
|
for season in data
|
||||||
|
for episode in reversed(season["_embedded"]["videoEpisodes"]["items"])
|
||||||
|
if season.get("episodeCount")
|
||||||
|
]
|
||||||
|
return Series(episodes)
|
||||||
|
|
||||||
|
def _movie(self, data: dict) -> Movie:
|
||||||
|
return [
|
||||||
|
Movie(
|
||||||
|
id_=data["_embedded"]["highlightVideo"]["id"],
|
||||||
|
service=self.__class__,
|
||||||
|
name=data.get("title"),
|
||||||
|
year=data.get("productionYear"),
|
||||||
|
data=data,
|
||||||
|
language=data.get("analytics", {}).get("dataLayer", {}).get("d_language", "en"),
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
def _episode(self, video_id: str) -> Episode:
|
||||||
|
data = self._request("GET", "/v3/video/{}".format(video_id))
|
||||||
|
return self.create_episode(data)
|
||||||
|
|
||||||
|
def _license(self, video_id: str):
|
||||||
|
token = self._request("POST", "/v3/token/jwt", data={"clientId": self.config["client"]})["token"]
|
||||||
|
response = self._request("GET", "/v3/token/drm/{}".format(video_id), headers={"bearer": token})
|
||||||
|
|
||||||
|
return response["license"]
|
||||||
|
|
||||||
|
def create_episode(self, episode: dict) -> Episode:
    """
    Convert an iview episode dict into an Episode.

    Season/episode numbering and the episode name are embedded in the
    title string, e.g. "Series 2 Episode 5 Some Name".
    """
    raw_title = episode.get("title")
    show_title = episode["showTitle"]

    season_m = re.search(r"Series (\d+)", raw_title)
    number_m = re.search(r"Episode (\d+)", raw_title)
    # prefer the full "Series N Episode N <name>" form, then "Series N <name>",
    # then fall back to the display subtitle
    name_m = re.search(r"Series \d+ Episode \d+ (.+)", raw_title) or re.search(r"Series \d+ (.+)", raw_title)
    episode_name = name_m.group(1) if name_m else episode.get("displaySubtitle")

    language = episode.get("analytics", {}).get("dataLayer", {}).get("d_language", "en")

    return Episode(
        id_=episode["id"],
        service=self.__class__,
        title=show_title,
        season=int(season_m.group(1)) if season_m else 0,
        number=int(number_m.group(1)) if number_m else 0,
        name=episode_name,
        data=episode,
        language=language,
    )
|
||||||
|
|
||||||
|
def _request(
    self,
    method: str,
    api: str,
    params: Optional[dict] = None,
    headers: Optional[dict] = None,
    data: Optional[dict] = None,
    payload: Optional[dict] = None,
) -> Any:
    """
    Send an API request and return the decoded JSON body.

    `api` may be a path (joined onto the configured base_url) or an
    absolute URL. `data` is form-encoded, `payload` is sent as JSON.

    NOTE(review): params/headers are merged into the *session*, so they
    persist across subsequent requests — confirm that is intended.
    Raises ConnectionError on non-200 responses and ValueError when the
    body is not valid JSON.
    """
    url = urljoin(self.config["endpoints"]["base_url"], api)

    if params:
        self.session.params.update(params)
    if headers:
        self.session.headers.update(headers)

    prep = self.session.prepare_request(Request(method, url, data=data, json=payload))

    response = self.session.send(prep)
    if response.status_code != 200:
        raise ConnectionError(f"{response.text}")

    try:
        return json.loads(response.content)

    except json.JSONDecodeError as e:
        raise ValueError(f"Failed to parse JSON: {response.text}") from e
|
||||||
|
|
||||||
9
services/AUBC/config.yaml
Normal file
9
services/AUBC/config.yaml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
headers:
|
||||||
|
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0
|
||||||
|
accept-language: en-US,en;q=0.8
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
base_url: https://api.iview.abc.net.au
|
||||||
|
license: https://wv-keyos.licensekeyserver.com/
|
||||||
|
|
||||||
|
client: "1d4b5cba-42d2-403e-80e7-34565cdf772d"
|
||||||
318
services/CBC/__init__.py
Normal file
318
services/CBC/__init__.py
Normal file
@ -0,0 +1,318 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH, HLS
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Tracks
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
|
||||||
|
class CBC(Service):
|
||||||
|
"""
|
||||||
|
\b
|
||||||
|
Service code for CBC Gem streaming service (https://gem.cbc.ca/).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: Credentials
|
||||||
|
Robustness:
|
||||||
|
AES-128: 1080p, DDP5.1
|
||||||
|
Widevine L3: 720p, DDP5.1
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input can be complete title URL or just the slug:
|
||||||
|
SHOW: https://gem.cbc.ca/murdoch-mysteries OR murdoch-mysteries
|
||||||
|
MOVIE: https://gem.cbc.ca/the-babadook OR the-babadook
|
||||||
|
|
||||||
|
\b
|
||||||
|
Notes:
|
||||||
|
- DRM encrypted titles max out at 720p.
|
||||||
|
- CCExtrator v0.94 will likely fail to extract subtitles. It's recommended to downgrade to v0.93.
|
||||||
|
- Some audio tracks contain invalid data, causing warning messages from mkvmerge during muxing
|
||||||
|
These can be ignored.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
GEOFENCE = ("ca",)
|
||||||
|
ALIASES = ("gem", "cbcgem",)
|
||||||
|
|
||||||
|
# CLI entry point: click constructs the service from the parsed arguments.
# @staticmethod must wrap the click command so the class attribute is not
# bound as an instance method.
@staticmethod
@click.command(name="CBC", short_help="https://gem.cbc.ca/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> CBC:
    return CBC(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title: str):
    """Store the raw title input (URL or slug) and resolve the API base URL."""
    # parsed later in get_titles(); must be set before Service.__init__
    self.title: str = title
    super().__init__(ctx)

    self.base_url: str = self.config["endpoints"]["base_url"]
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Search the Gem catalog for self.title (first 20 results)."""
    params = {
        "device": "web",
        "pageNumber": "1",
        "pageSize": "20",
        "term": self.title,
    }
    response: dict = self._request("GET", "/ott/catalog/v1/gem/search", params=params)

    for result in response.get("result", []):
        yield SearchResult(
            id_="https://gem.cbc.ca/{}".format(result.get("url")),
            title=result.get("title"),
            description=result.get("synopsis"),
            label=result.get("type"),
            url="https://gem.cbc.ca/{}".format(result.get("url")),
        )
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """
    Log in with ROPC OAuth credentials, caching tokens per credential.

    Uses a cached access token when valid, refreshes it when expired, and
    otherwise performs a fresh password grant. Finally exchanges the access
    token for a claims token and attaches it to all future requests.
    """
    super().authenticate(cookies, credential)
    if not credential:
        raise EnvironmentError("Service requires Credentials for Authentication.")

    # NOTE(review): assumed to always return a cache entry object (never
    # None), since the "new tokens" branch below calls tokens.set() — confirm.
    tokens: Optional[Any] = self.cache.get(f"tokens_{credential.sha1}")

    """
    All grant types for future reference:
    PASSWORD("password"),
    ACCESS_TOKEN("access_token"),
    REFRESH_TOKEN("refresh_token"),
    CLIENT_CREDENTIALS("client_credentials"),
    AUTHORIZATION_CODE("authorization_code"),
    CODE("code");
    """

    if tokens and not tokens.expired:
        # cached
        self.log.info(" + Using cached tokens")
        auth_token: str = tokens.data["access_token"]

    elif tokens and tokens.expired:
        # expired, refresh
        self.log.info("Refreshing cached tokens...")
        auth_url, scopes = self.settings()
        params = {
            "client_id": self.config["client"]["id"],
            "grant_type": "refresh_token",
            "refresh_token": tokens.data["refresh_token"],
            "scope": scopes,
        }

        access: dict = self._request("POST", auth_url, params=params)

        # Shorten expiration by one hour to account for clock skew
        tokens.set(access, expiration=int(access["expires_in"]) - 3600)
        auth_token: str = access["access_token"]

    else:
        # new
        self.log.info("Requesting new tokens...")
        auth_url, scopes = self.settings()
        params = {
            "client_id": self.config["client"]["id"],
            "grant_type": "password",
            "username": credential.username,
            "password": credential.password,
            "scope": scopes,
        }

        access: dict = self._request("POST", auth_url, params=params)

        # Shorten expiration by one hour to account for clock skew
        tokens.set(access, expiration=int(access["expires_in"]) - 3600)
        auth_token: str = access["access_token"]

    claims_token: str = self.claims_token(auth_token)
    self.session.headers.update({"x-claims-token": claims_token})
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """
    Parse the URL/slug input and fetch the title's catalog entry, returning
    Movies for film-type content and Series for everything else.
    """
    title_re: str = r"^(?:https?://(?:www.)?gem.cbc.ca/)?(?P<id>[a-zA-Z0-9_-]+)"
    try:
        title_id: str = re.match(title_re, self.title).group("id")
    except Exception:
        raise ValueError("- Could not parse ID from title")

    params = {"device": "web"}
    data: dict = self._request("GET", "/ott/catalog/v2/gem/show/{}".format(title_id), params=params)
    label: str = data.get("contentType", "").lower()

    if label in ("film", "movie", "standalone"):
        movies: list[Movie] = self._movie(data)
        return Movies(movies)

    else:
        episodes: list[Episode] = self._show(data)
        return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """
    Resolve the media index for the title, pick an HLS/DASH tech based on
    DRM status, and build tracks from the validated stream URL.

    Side effects: stashes chapter/credits metadata on title.data["extra"]
    and records the widevine license URL/token on self for
    get_widevine_license().
    """
    index: dict = self._request(
        "GET", "/media/meta/v1/index.ashx", params={"appCode": "gem", "idMedia": title.id, "output": "jsonObject"}
    )

    # "Chapitres"/"CreditStartTime" feed get_chapters() later
    title.data["extra"] = {
        "chapters": index["Metas"].get("Chapitres"),
        "credits": index["Metas"].get("CreditStartTime"),
    }

    self.drm: bool = index["Metas"].get("isDrmActive") == "true"
    if self.drm:
        # pick the tech that offers widevine when DRM is active
        tech: str = next(tech["name"] for tech in index["availableTechs"] if "widevine" in tech["drm"])
    else:
        tech: str = next(tech["name"] for tech in index["availableTechs"] if not tech["drm"])

    response: dict = self._request(
        "GET", self.config["endpoints"]["validation"].format("android", title.id, "smart-tv", tech)
    )

    manifest = response.get("url")
    # license endpoint/token for DRM'd streams; None for clear streams
    self.license = next((x["value"] for x in response["params"] if "widevineLicenseUrl" in x["name"]), None)
    self.token = next((x["value"] for x in response["params"] if "widevineAuthToken" in x["name"]), None)

    stream_type: Union[HLS, DASH] = HLS if tech == "hls" else DASH
    tracks: Tracks = stream_type.from_url(manifest, self.session).to_tracks(language=title.language)

    if stream_type == DASH:
        # mark described-video audio via the DASH Label element
        for track in tracks.audio:
            label = track.data["dash"]["adaptation_set"].find("Label")
            if label is not None and "descriptive" in label.text.lower():
                track.descriptive = True

    # manifest language tags are unreliable; force the title's language
    for track in tracks:
        track.language = title.language

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Build chapters from the markers stashed on the title by get_tracks().

    Fix: the original de-duplicated the chapter markers with ``set(...)``,
    which made the resulting chapter order non-deterministic between runs.
    ``dict.fromkeys`` de-duplicates while preserving the marker order.
    """
    extra: dict = title.data["extra"]

    chapters = []
    if extra.get("chapters"):
        # Ordered de-duplication of the comma-separated chapter markers.
        chapters = [Chapter(timestamp=x) for x in dict.fromkeys(extra["chapters"].split(","))]

    if extra.get("credits"):
        chapters.append(Chapter(name="Credits", timestamp=float(extra["credits"])))

    return Chapters(chapters)
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
    """Return no service certificate (the CDM's common privacy cert is used).

    Fix: the original was annotated ``-> str`` while unconditionally
    returning ``None``; the annotation now matches the behavior.
    """
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(
    self, *, challenge: bytes, title: Union[Movies, Series], track: AnyTrack
) -> Optional[Union[bytes, str]]:
    """POST the CDM challenge to the license server captured by get_tracks().

    Returns None when no license URL/auth token was provided (clear content).
    """
    if not (self.license and self.token):
        return None

    res = self.session.post(
        self.license,
        headers={"x-dt-auth-token": self.token},
        data=challenge,
    )
    res.raise_for_status()
    return res.content
|
||||||
|
|
||||||
|
# Service specific
|
||||||
|
|
||||||
|
def _show(self, data: dict) -> list[Episode]:
    """Parse the "Episodes" lineup of a show page into Episode titles.

    Fix: on a missing lineup the original fell through with a bare
    ``return`` (i.e. ``None``) despite the ``list[Episode]`` annotation,
    which would break the caller's ``Series(episodes)``; it now returns [].
    """
    lineups: Optional[list] = next(
        (x["lineups"] for x in data["content"] if x.get("title", "").lower() == "episodes"), None
    )
    if not lineups:
        self.log.warning("No episodes found for: {}".format(data.get("title")))
        return []

    titles = []
    for season in lineups:
        for episode in season["items"]:
            if episode.get("mediaType", "").lower() != "episode":
                continue
            # Titles usually look like "<number>. <name>"; keep the name part.
            parts = episode.get("title", "").split(".", 1)
            episode_name = parts[1].strip() if len(parts) > 1 else parts[0].strip()
            titles.append(
                Episode(
                    id_=episode["idMedia"],
                    service=self.__class__,
                    title=data.get("title"),
                    season=int(season.get("seasonNumber", 0)),
                    number=int(episode.get("episodeNumber", 0)),
                    name=episode_name,
                    year=episode.get("metadata", {}).get("productionYear"),
                    language=data["structuredMetadata"].get("inLanguage", "en-CA"),
                    data=episode,
                )
            )

    return titles
|
||||||
|
|
||||||
|
def _movie(self, data: dict) -> list[Movie]:
    """Parse a movie page's lineup into Movie titles.

    Fix: on a missing lineup the original returned ``None`` despite the
    ``list[Movie]`` annotation, which would break the caller's
    ``Movies(movies)``; it now returns [].
    """
    unwanted: tuple = ("episodes", "trailers", "extras")
    lineups: Optional[list] = next(
        (x["lineups"] for x in data["content"] if x.get("title", "").lower() not in unwanted), None
    )
    if not lineups:
        self.log.warning("No movies found for: {}".format(data.get("title")))
        return []

    titles = []
    for season in lineups:
        for movie in season["items"]:
            # NOTE(review): movies are filtered on mediaType == "episode",
            # mirroring _show(); looks like the API labels all items this
            # way, but confirm against a live response.
            if movie.get("mediaType", "").lower() != "episode":
                continue
            parts = movie.get("title", "").split(".", 1)
            movie_name = parts[1].strip() if len(parts) > 1 else parts[0].strip()
            titles.append(
                Movie(
                    id_=movie.get("idMedia"),
                    service=self.__class__,
                    name=movie_name,
                    year=movie.get("metadata", {}).get("productionYear"),
                    language=data["structuredMetadata"].get("inLanguage", "en-CA"),
                    data=movie,
                )
            )

    return titles
|
||||||
|
|
||||||
|
def settings(self) -> tuple:
    """Fetch platform settings; return the (auth_url, scopes) pair for login."""
    settings = self._request("GET", "/ott/catalog/v1/gem/settings", params={"device": "web"})
    ropc: dict = settings["identityManagement"]["ropc"]
    return ropc["url"], ropc["scopes"]
|
||||||
|
|
||||||
|
def claims_token(self, token: str) -> str:
    """Exchange a bearer token for the subscriber's claims token."""
    profile: dict = self._request(
        "GET",
        "/ott/subscription/v2/gem/Subscriber/profile",
        headers={"Authorization": "Bearer " + token},
        params={"device": "web"},
    )
    return profile["claimsToken"]
|
||||||
|
|
||||||
|
def _request(self, method: str, api: str, **kwargs: Any) -> Any:
    """Send an API request and return the decoded JSON payload.

    Fixes: the original annotation ``Any[dict | str]`` is not valid typing
    (``Any`` is not subscriptable); and the ``try`` block wrapped the whole
    error-key scan instead of just the JSON decode.

    Raises ConnectionError on unexpected status codes or non-JSON bodies;
    exits the process if the payload carries a service error message.
    """
    url: str = urljoin(self.base_url, api)

    prep: Request = self.session.prepare_request(Request(method, url, **kwargs))
    response = self.session.send(prep)
    # NOTE(review): 426 is deliberately tolerated alongside 200 — the reason
    # is not evident from this file; confirm before tightening.
    if response.status_code not in (200, 426):
        raise ConnectionError(f"{response.status_code} - {response.text}")

    try:
        data = json.loads(response.content)
    except json.JSONDecodeError:
        raise ConnectionError("Request for {} failed: {}".format(response.url, response.text))

    # The service reports errors in-band under a variety of keys.
    error_keys = ["errorMessage", "ErrorMessage", "ErrorCode", "errorCode", "error"]
    error_message = next((data.get(key) for key in error_keys if key in data), None)
    if error_message:
        self.log.error(f"\n - Error: {error_message}\n")
        sys.exit(1)

    return data
|
||||||
7
services/CBC/config.yaml
Normal file
7
services/CBC/config.yaml
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
endpoints:
|
||||||
|
base_url: "https://services.radio-canada.ca"
|
||||||
|
validation: "/media/validation/v2?appCode=gem&&deviceType={}&idMedia={}&manifestType={}&output=json&tech={}"
|
||||||
|
api_key: "3f4beddd-2061-49b0-ae80-6f1f2ed65b37"
|
||||||
|
|
||||||
|
client:
|
||||||
|
id: "fc05b0ee-3865-4400-a3cc-3da82c330c23"
|
||||||
241
services/CBS/__init__.py
Normal file
241
services/CBS/__init__.py
Normal file
@ -0,0 +1,241 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections.abc import Generator
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Tracks
|
||||||
|
from devine.core.utils.sslciphers import SSLCiphers
|
||||||
|
from devine.core.utils.xml import load_xml
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
|
||||||
|
class CBS(Service):
    """
    \b
    Service code for CBS.com streaming service (https://cbs.com).
    Credit to @srpen6 for the tip on anonymous session

    \b
    Author: stabbedbybrick
    Authorization: None
    Robustness:
      Widevine:
        L3: 2160p, DDP5.1

    \b
    Tips:
        - Input should be complete URLs:
          SERIES: https://www.cbs.com/shows/tracker/
          EPISODE: https://www.cbs.com/shows/video/E0wG_ovVMkLlHOzv7KDpUV9bjeKFFG2v/

    \b
    Common VPN/proxy errors:
        - SSLError(SSLEOFError(8, '[SSL: UNEXPECTED_EOF_WHILE_READING]'))
        - ConnectionError: 406 Not Acceptable, 403 Forbidden
    """

    GEOFENCE = ("us",)

    @staticmethod
    @click.command(name="CBS", short_help="https://cbs.com", help=__doc__)
    @click.argument("title", type=str, required=False)
    @click.pass_context
    def cli(ctx, **kwargs) -> CBS:
        # Click entry point: constructs the service from the parsed CLI args.
        return CBS(ctx, **kwargs)

    def __init__(self, ctx, title):
        self.title = title
        super().__init__(ctx)

    def search(self) -> Generator[SearchResult, None, None]:
        """Yield search results for the user-supplied term."""
        params = {
            "term": self.title,
            "termCount": 50,
            "showCanVids": "true",
        }
        results = self._request(
            "GET", "/apps-api/v3.1/androidphone/contentsearch/search.json", params=params
        )["terms"]

        for result in results:
            yield SearchResult(
                id_=result.get("path"),
                title=result.get("title"),
                description=None,
                label=result.get("term_type"),
                url=result.get("path"),
            )

    def get_titles(self) -> Titles_T:
        """Parse the input URL and return the matching series or episode titles."""
        title_re = r"https://www\.cbs\.com/shows/(?P<video>video/)?(?P<id>[a-zA-Z0-9_-]+)/?$"
        try:
            video, title_id = (re.match(title_re, self.title).group(i) for i in ("video", "id"))
        except Exception:
            raise ValueError("- Could not parse ID from title")

        if video:
            episodes = self._episode(title_id)
        else:
            episodes = self._show(title_id)

        return Series(episodes)

    def get_tracks(self, title: Title_T) -> Tracks:
        """Acquire an anonymous session for the title and parse its DASH manifest."""
        self.token, self.license = self.ls_session(title.id)
        manifest = self.get_manifest(title)
        return DASH.from_url(url=manifest).to_tracks(language=title.language)

    def get_chapters(self, title: Episode) -> Chapters:
        """Return a single "Credits" chapter when the title carries a marker."""
        if not title.data.get("playbackEvents", {}).get("endCreditChapterTimeMs"):
            return Chapters()

        end_credits = title.data["playbackEvents"]["endCreditChapterTimeMs"]
        return Chapters([Chapter(name="Credits", timestamp=end_credits)])

    def certificate(self, **_):
        return None  # will use common privacy cert

    def get_widevine_license(
        self, *, challenge: bytes, title: Title_T, track: AnyTrack
    ) -> Optional[Union[bytes, str]]:
        """POST the CDM challenge to the license server from ls_session()."""
        headers = {"Authorization": f"Bearer {self.token}"}
        r = self.session.post(self.license, headers=headers, data=challenge)
        if not r.ok:
            self.log.error(r.text)
            sys.exit(1)
        return r.content

    # Service specific functions

    def _show(self, title: str) -> list[Episode]:
        """Resolve a show slug to its full-episode listing.

        Fix: annotated ``-> Episode`` originally, but a list is returned.
        """
        data = self._request("GET", "/apps-api/v3.0/androidphone/shows/slug/{}.json".format(title))

        links = next((x.get("links") for x in data["showMenu"] if x.get("device_app_id") == "all_platforms"), None)
        config = next((x.get("videoConfigUniqueName") for x in links if x.get("title").strip() == "Episodes"), None)
        show = next((x for x in data["show"]["results"] if x.get("type").strip() == "show"), None)
        seasons = [x.get("seasonNum") for x in data["available_video_seasons"].get("itemList", [])]
        locale = show.get("locale", "en-US")

        show_data = self._request(
            "GET", "/apps-api/v2.0/androidphone/shows/{}/videos/config/{}.json".format(show.get("show_id"), config),
            params={"platformType": "apps", "rows": "1", "begin": "0"},
        )

        section = next(
            (x["sectionId"] for x in show_data["videoSectionMetadata"] if x["title"] == "Full Episodes"), None
        )

        episodes = []
        for season in seasons:
            res = self._request(
                "GET", "/apps-api/v2.0/androidphone/videos/section/{}.json".format(section),
                params={"begin": "0", "rows": "999", "params": f"seasonNum={season}", "seasonNum": season},
            )
            episodes.extend(res["sectionItems"].get("itemList", []))

        return [
            Episode(
                id_=episode["contentId"],
                title=episode["seriesTitle"],
                season=episode["seasonNum"] if episode["fullEpisode"] else 0,
                number=episode["episodeNum"] if episode["fullEpisode"] else episode["positionNum"],
                name=episode["label"],
                language=locale,
                service=self.__class__,
                data=episode,
            )
            for episode in episodes
            if episode["fullEpisode"]
        ]

    def _episode(self, title: str) -> list[Episode]:
        """Resolve a single content id to its Episode objects.

        Fix: annotated ``-> Episode`` originally, but a list is returned.
        """
        data = self._request("GET", "/apps-api/v2.0/androidphone/video/cid/{}.json".format(title))

        return [
            Episode(
                id_=episode["contentId"],
                title=episode["seriesTitle"],
                season=episode["seasonNum"] if episode["fullEpisode"] else 0,
                number=episode["episodeNum"] if episode["fullEpisode"] else episode["positionNum"],
                name=episode["label"],
                language="en-US",
                service=self.__class__,
                data=episode,
            )
            for episode in data["itemList"]
        ]

    def ls_session(self, content_id: str) -> tuple:
        """Fetch the anonymous session token and license URL for a content id.

        Fix: annotated ``-> str`` originally, but a (token, url) tuple is returned.
        """
        res = self._request(
            "GET", "/apps-api/v3.0/androidphone/irdeto-control/anonymous-session-token.json",
            params={"contentId": content_id},
        )

        return res.get("ls_session"), res.get("url")

    def get_manifest(self, title: Episode) -> str:
        """Resolve the best manifest URL, preferring HDR, then CENC DASH.

        Falls back to the title's own ``streamingUrl`` when ThePlatform
        lookup fails for any reason.
        """
        try:
            res = self._request(
                "GET", "http://link.theplatform.com/s/{}/media/guid/2198311517/{}".format(
                    title.data.get("cmsAccountId"), title.id
                ),
                params={
                    "format": "SMIL",
                    "assetTypes": "|".join(self.config["assets"]),
                    "formats": "MPEG-DASH,MPEG4,M3U",
                },
            )

            body = load_xml(res).find("body").find("seq").findall("switch")
            # Highest system-bitrate <switch> holds the best renditions.
            bitrate = max(body, key=lambda x: int(x.find("video").get("system-bitrate")))
            videos = [x.get("src") for x in bitrate.findall("video")]
            if not videos:
                raise ValueError("Could not find any streams - is the title still available?")

            manifest = next(
                (x for x in videos if "hdr_dash" in x.lower()),
                next((x for x in videos if "cenc_dash" in x.lower()), videos[0]),
            )

        except Exception as e:
            self.log.warning("ThePlatform request failed: {}, falling back to standard manifest".format(e))
            if not title.data.get("streamingUrl"):
                raise ValueError("Could not find any streams - is the title still available?")

            manifest = title.data.get("streamingUrl")

        return manifest

    def _request(self, method: str, api: str, params: dict = None, headers: dict = None) -> Any:
        """Send an API request; return decoded JSON, or raw text for non-JSON bodies.

        Fix: the original annotation ``Any[dict | str]`` is not valid typing.
        """
        url = urljoin(self.config["endpoints"]["base_url"], api)
        self.session.headers.update(self.config["headers"])
        self.session.params = {"at": self.config["endpoints"]["token"]}
        # NOTE(review): the SSLCiphers adapter appears to work around the TLS
        # handshake failures mentioned in the class docstring — confirm.
        for prefix in ("https://", "http://"):
            self.session.mount(prefix, SSLCiphers(security_level=2))

        if params:
            self.session.params.update(params)
        if headers:
            self.session.headers.update(headers)

        prep = self.session.prepare_request(Request(method, url))

        response = self.session.send(prep)
        if response.status_code != 200:
            raise ConnectionError(f"{response.text}")

        try:
            data = json.loads(response.content)
            if not data.get("success"):
                raise ValueError(data.get("message"))
            return data

        except json.JSONDecodeError:
            # SMIL/XML responses (e.g. ThePlatform) are returned as text.
            return response.text
|
||||||
10
services/CBS/config.yaml
Normal file
10
services/CBS/config.yaml
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
headers:
|
||||||
|
user-agent: Mozilla/5.0 (Linux; Android 13; SM-A536E) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Mobile Safari/537.36
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
base_url: https://cbsdigital.cbs.com
|
||||||
|
token: ABBsaBMagMmYLUc9iXB0lXEKsUQ0/MwRn6z3Tg0KKQaH7Q6QGqJcABwlBP4XiMR1b0Q=
|
||||||
|
|
||||||
|
assets: [HLS_AES, DASH_LIVE, DASH_CENC, DASH_CENC_HDR10, DASH_LIVE, DASH_TA, DASH_CENC_PS4]
|
||||||
|
|
||||||
|
|
||||||
533
services/CR/__init__.py
Normal file
533
services/CR/__init__.py
Normal file
@ -0,0 +1,533 @@
|
|||||||
|
import math
|
||||||
|
import uuid
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Optional, Union, Generator
|
||||||
|
import click
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Titles_T, Title_T, Series, Episode
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.tracks import Chapters, Tracks, Subtitle, Chapter
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
|
||||||
|
|
||||||
|
class CR(Service):
|
||||||
|
"""
|
||||||
|
Service code for Crunchyroll (https://crunchyroll.com)
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: TPD94 - edited
|
||||||
|
Authorization: Login
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L3: 1080p
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Use complete title/episode URL or id as input:
|
||||||
|
https://www.crunchyroll.com/series/GG5H5XQ7D/kaiju-no-8
|
||||||
|
OR
|
||||||
|
GG5H5XQ7D
|
||||||
|
- Supports series
|
||||||
|
"""
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="CR", short_help="https://crunchyroll.com/", help=__doc__)
@click.argument("title", type=str)
@click.option("--display_locale", type=str, help="Set display language")
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: constructs the service from the parsed CLI arguments.
    return CR(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title, display_locale="en-US"):
    """Accept either a full /series/ URL or a bare series ID as the title.

    Fix: the URL parse used a bare ``except:`` which would also swallow
    KeyboardInterrupt/SystemExit; it now catches only the parse failures.
    """
    try:
        parts = title.split("/")
        # "series" segment is followed by the series ID.
        self.title = parts[parts.index("series") + 1]
    except (ValueError, IndexError):
        # Not a /series/ URL; treat the raw input as the series ID.
        self.title = title

    self.display_locale = display_locale

    # Session state populated by authenticate().
    self.token = None
    self.refresh_token = None
    self.token_expiry = None
    self.credential = None
    self.device_id = None
    self.device_name = None
    self.device_type = None

    super().__init__(ctx)
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> Optional[str]:
    """Ensure a valid bearer token exists, logging in or refreshing as needed.

    Returns the current access token (also set on the session headers).
    Fixes: the password-grant and refresh-grant requests were near-identical
    copies — factored into :meth:`_token_grant`; the return annotation was
    ``-> None`` although the token is returned.
    """
    # Device identity: configured values, else defaults / a random id.
    self.device_id = self.config.get("device", {}).get("id") or str(uuid.uuid4())
    self.device_name = self.config.get("device", {}).get("name") or "AOSP on IA Emulator"
    self.device_type = self.config.get("device", {}).get("type") or "Google AOSP on IA Emulator"

    # Remember the credential for later re-authentication calls.
    if self.credential is None:
        self.credential = credential

    if self.token is None:
        # First login: password grant.
        self._token_grant({
            "username": f"{credential.username}",
            "password": f"{credential.password}",
            "grant_type": "password",
        })
        return self.token

    if self.token_expiry and self.token_expiry < datetime.now().timestamp():
        # Token expired: refresh grant.
        self._token_grant({
            "refresh_token": self.refresh_token,
            "grant_type": "refresh_token",
        })
        return self.token

    return self.token

def _token_grant(self, grant: dict) -> None:
    """POST a token request and store the access/refresh tokens on the session."""
    auth_response = self.session.post(
        url=self.config["endpoints"]["auth_url"],
        headers={
            # Basic auth header with the (public) client credentials.
            "Authorization": "Basic eHVuaWh2ZWRidDNtYmlzdWhldnQ6MWtJUzVkeVR2akUwX3JxYUEzWWVBaDBiVVhVbXhXMTE=",
            "Content-Type": "application/x-www-form-urlencoded",
            "ETP-Anonymous-ID": f"{uuid.uuid4()}",
        },
        data={
            **grant,
            "scope": "offline_access",
            "device_id": f"{self.device_id}",
            "device_name": f"{self.device_name}",
            "device_type": f"{self.device_type}",
        },
    ).json()

    self.token = auth_response["access_token"]
    self.refresh_token = auth_response["refresh_token"]
    # Refresh well before actual expiry; 4 minutes is deliberately conservative.
    self.token_expiry = (datetime.now() + timedelta(minutes=4)).timestamp()
    self.session.headers.update({"Authorization": f"Bearer {self.token}"})
|
||||||
|
|
||||||
|
def get_titles(self) -> Titles_T:
    """Walk every season of the series and return its episodes.

    Fixes: ``original_id`` could be left unbound when versions existed but
    none was flagged original; ``sn``/``en`` could be left unbound when
    reading the identifier raised before they were initialised (the bare
    ``except: pass`` then led to a NameError later); ``== True`` comparison.
    """
    episodes = []

    # Refresh the bearer token if needed.
    self.authenticate(credential=self.credential)

    seasons = self.session.get(
        url=self.config["endpoints"]["series_metadata"].format(title=self.title),
        params={"force_locale": "", "locale": f"{self.display_locale}"},
    ).json()["data"]

    for season in seasons:
        season_episodes = self.session.get(
            url=self.config["endpoints"]["episode_metadata"].format(season=season["id"]),
            params={"locale": f"{self.display_locale}"},
        ).json()["data"]

        for episode in season_episodes:
            # Prefer the GUID of the original-audio version when one exists.
            original_id = next(
                (v["guid"] for v in episode.get("versions") or [] if v["original"]),
                episode["id"],
            )

            # Derive season/episode numbers from the "identifier" field
            # (e.g. "SERIES|S2|E5"); fall back to the metadata counters.
            sn = en = None  # initialised up front so a parse failure cannot leave them unbound
            try:
                parts = episode["identifier"].split("|")
                if len(parts) == 3:
                    marker = parts[1]
                    if marker.startswith("S") and marker[1:].isdigit():
                        sn = int(marker[1:])
                        en = float(parts[2][1:])
                    elif marker.startswith("S"):
                        sn = episode["season_number"]
                        en = float(parts[2][1:])
                    elif marker == "M" or marker.startswith("O"):
                        sn = episode["season_sequence_number"]
                        en = math.ceil(episode["sequence_number"])
            except (KeyError, IndexError, ValueError, TypeError):
                pass

            episodes.append(
                Episode(
                    id_=original_id,
                    title=episode["series_title"],
                    season=sn or episode["season_number"],
                    number=math.ceil(en or episode["sequence_number"]),
                    name=episode["title"],
                    year=episode["episode_air_date"][:4],
                    language=episode["audio_locale"],
                    service=self.__class__,
                )
            )

    return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
    """Collect tracks for every audio version of the episode.

    Adds the originally-requested version first, then every other version
    listed in its metadata, de-duplicating subtitles across versions.
    Fixes: the old ``except: print(title_metadata)`` referenced a name that
    was unbound on failure (NameError) and swallowed the real error —
    request failures now propagate; the duplicated per-version logic is
    factored into private helpers.
    """
    tracks = Tracks()

    # Refresh the bearer token if needed.
    self.authenticate(credential=self.credential)

    # Originally requested version.
    title_metadata = self._playback_metadata(title.id)
    self._add_version_tracks(tracks, title.id, title_metadata)

    # (language, codec, guid) keys already added, to avoid duplicates.
    added_subtitles: set = set()
    self._add_version_subtitles(tracks, title.id, title_metadata, added_subtitles, forced=False)

    # Release the video token once the version has been processed.
    self.deactivate_video_token(title=title.id, token=title_metadata["token"])
    self.delete_video_token(title=title.id, token=title_metadata["token"])

    # Remaining audio versions (dubs).
    for version in title_metadata["versions"]:
        if version["guid"] == title.id:
            continue
        metadata = self._playback_metadata(version["guid"])
        self._add_version_tracks(tracks, version["guid"], metadata)
        self._add_version_subtitles(tracks, version["guid"], metadata, added_subtitles, forced=True)
        self.deactivate_video_token(title=version["guid"], token=metadata["token"])
        self.delete_video_token(title=version["guid"], token=metadata["token"])

    return tracks

def _playback_metadata(self, guid: str) -> dict:
    """Fetch playback metadata (MPD URL, subtitles, video token) for a version GUID."""
    return self.session.get(
        url=self.config["endpoints"]["video_token"].format(id=guid),
        headers={
            "Accept-Encoding": "gzip",
            "Authorization": f"Bearer {self.token}",
            "Connection": "Keep-Alive",
            "ETP-Anonymous-ID": f"{uuid.uuid4()}",
            "Host": "www.crunchyroll.com",
        },
    ).json()

def _add_version_tracks(self, tracks: Tracks, guid: str, metadata: dict) -> None:
    """Parse the version's MPD and add its tracks, tagging each with the GUID."""
    mpd_tracks = DASH.from_url(url=metadata["url"], session=self.session).to_tracks(
        language=metadata["audioLocale"]
    )
    for track in mpd_tracks:
        track.data["guid"] = guid
    tracks.add(mpd_tracks)

def _add_version_subtitles(
    self, tracks: Tracks, guid: str, metadata: dict, added: set, forced: bool
) -> None:
    """Add this version's .ass subtitles, skipping keys already in ``added``.

    For non-original versions (forced=True) only the subtitle matching the
    version's own audio locale is added, and it is flagged as forced —
    mirroring the original per-version behavior.
    """
    for lang, sub in metadata["subtitles"].items():
        if lang == "none" or sub["language"] == "none" or "format" not in sub:
            continue
        if sub["format"].lower() != "ass":
            continue
        key = (sub["language"], Subtitle.Codec.from_mime(sub["format"]), guid)
        if key in added:
            continue
        if forced and sub["language"] != metadata["audioLocale"]:
            continue
        extra = {"forced": True} if forced else {}
        tracks.add(
            Subtitle(
                language=sub["language"],
                codec=Subtitle.Codec.from_mime(sub["format"]),
                url=sub["url"],
                **extra,
            )
        )
        added.add(key)
|
||||||
|
|
||||||
|
def get_chapters(self, title: Title_T) -> Chapters:
    """
    Build the chapter list (Recap/Opening/Episode/Ending/Preview) for a title.

    Chapter markers come from the skip-events endpoint. Chapters are
    non-essential, so any failure is treated as "no chapters".
    """
    chapters = Chapters()

    # Ensure we hold a valid authorization bearer token
    self.authenticate(credential=self.credential)

    try:
        chapters_metadata = self.session.get(
            url=self.config["endpoints"]["chapters_metadata"].format(id=title.id)
        ).json()

        # Parse the chapter sections from the response
        recap = chapters_metadata.get("recap")
        intro = chapters_metadata.get("intro")
        credits = chapters_metadata.get("credits")
        preview = chapters_metadata.get("preview")

        if recap:
            if recap["start"] != 0:
                # Dummy Episode chapter at file start; the content before the
                # recap marker cannot be identified reliably.
                chapters.add(Chapter(timestamp=0, name="Episode"))
            # Recap section (rarely present)
            chapters.add(Chapter(timestamp=recap["start"] * 1000, name=recap["type"].capitalize()))

        if intro:
            # If there is no recap and the intro does not start at 0, anchor a
            # dummy Episode chapter to avoid devine's first-chapter fallback.
            if not recap and intro["start"] != 0:
                chapters.add(Chapter(timestamp=0, name="Episode"))
            chapters.add(Chapter(timestamp=intro["start"] * 1000, name="Opening"))
            # The episode resumes where the intro ends
            chapters.add(Chapter(timestamp=intro["end"] * 1000, name="Episode"))

        if credits:
            if not recap and not intro:
                # No earlier chapters at all; anchor an Episode chapter at 0
                chapters.add(Chapter(timestamp=0, name="Episode"))
            chapters.add(Chapter(timestamp=credits["start"] * 1000, name="Ending"))

        if preview:
            if not recap and not intro and not credits:
                # No other chapters provided; anchor an Episode chapter at 0
                chapters.add(Chapter(timestamp=0, name="Episode"))
            # Prefer the credits end time to guard against broken Preview markers
            if credits and "end" in credits:
                if preview["start"] > credits["end"]:
                    # Non-preview scenes sit between credits end and the preview
                    chapters.add(Chapter(timestamp=credits["end"] * 1000, name="Episode"))
                    chapters.add(Chapter(timestamp=preview["start"] * 1000, name="Preview"))
                else:
                    chapters.add(Chapter(timestamp=credits["end"] * 1000, name="Preview"))
            else:
                # Fallback: trust the preview marker as-is
                chapters.add(Chapter(timestamp=preview["start"] * 1000, name="Preview"))

        elif credits and "end" in credits:
            # No preview marker provided, but one usually follows the credits
            chapters.add(Chapter(timestamp=credits["end"] * 1000, name="Preview"))

    except Exception:
        # Chapters are best-effort. This was a bare `except:` which also
        # swallowed SystemExit/KeyboardInterrupt — narrowed to Exception.
        pass

    return chapters
|
||||||
|
|
||||||
|
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
    """Exchange a Widevine challenge for a license, using a short-lived video token."""
    # Ensure we hold a valid authorization bearer token
    self.authenticate(credential=self.credential)

    # Licensing requires a play (video) token for the content GUID
    video_token = self.get_video_token(title=track.data["guid"])

    self.session.headers.update({
        "User-Agent": "okhttp/4.12.0",
        "content-type": "application/octet-stream",
        "x-cr-content-id": f"{track.data['guid']}",
        "x-cr-video-token": f"{video_token}",
    })

    # POST the raw challenge to the license proxy
    license_response = self.session.post(
        url=self.config["endpoints"]["license_url"],
        data=challenge,
    ).content.decode()

    # Release the video token now that the license has been obtained
    self.deactivate_video_token(title=track.data["guid"], token=video_token)
    self.delete_video_token(title=track.data["guid"], token=video_token)

    return license_response
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Yield series results for the requested search keyword."""
    # Ensure we hold a valid authorization bearer token
    self.authenticate(credential=self.credential)

    response = self.session.get(
        url=self.config["endpoints"]["search_url"].format(search_keyword=self.title)
    ).json()

    # Only "series" result groups are surfaced to the user
    for group in response["data"]:
        if group["type"] != "series":
            continue
        for item in group["items"]:
            yield SearchResult(
                id_=item["id"],
                title=item["title"],
                description=item["description"],
            )
|
||||||
|
|
||||||
|
def get_video_token(self, title: str) -> str:
    """Fetch a playback (video) token for the given title GUID."""
    # Ensure we hold a valid authorization bearer token
    self.authenticate(credential=self.credential)

    response = self.session.get(
        url=self.config["endpoints"]["video_token"].format(id=title),
        headers={
            "Accept-Encoding": "gzip",
            "Authorization": f"Bearer {self.token}",
            "Connection": "Keep-Alive",
            "ETP-Anonymous-ID": f"{uuid.uuid4()}",
            "Host": "www.crunchyroll.com",
        },
    )

    # Return the token string from the play-service response
    return response.json()["token"]
|
||||||
|
|
||||||
|
def deactivate_video_token(self, title: str, token: str) -> None:
    """Deactivate a previously issued video token (PATCH on the token resource)."""
    # Ensure we hold a valid authorization bearer token
    self.authenticate(credential=self.credential)

    self.session.patch(
        url=self.config["endpoints"]["video_token_patch"].format(title_id=title, video_token=token),
    )
|
||||||
|
|
||||||
|
def delete_video_token(self, title: str, token: str) -> None:
    """Delete a previously issued video token (DELETE on the token resource)."""
    # Ensure we hold a valid authorization bearer token
    self.authenticate(credential=self.credential)

    self.session.delete(
        url=self.config["endpoints"]["video_token_delete"].format(title_id=title, video_token=token),
    )
|
||||||
9
services/CR/config.yaml
Normal file
9
services/CR/config.yaml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
endpoints:
  auth_url: https://beta-api.crunchyroll.com/auth/v1/token
  series_metadata: https://beta-api.crunchyroll.com/content/v2/cms/series/{title}/seasons
  episode_metadata: https://beta-api.crunchyroll.com/content/v2/cms/seasons/{season}/episodes
  chapters_url: https://static.crunchyroll.com/skip-events/production/{id}.json
  # Alias used by get_chapters(); kept alongside chapters_url for compatibility.
  chapters_metadata: https://static.crunchyroll.com/skip-events/production/{id}.json
  video_token: https://cr-play-service.prd.crunchyrollsvc.com/v1/{id}/android/phone/play
  # Referenced by deactivate_video_token(); PATCH and DELETE target the same
  # token resource — TODO confirm against the play-service API.
  video_token_patch: https://cr-play-service.prd.crunchyrollsvc.com/v1/token/{title_id}/{video_token}
  video_token_delete: https://cr-play-service.prd.crunchyrollsvc.com/v1/token/{title_id}/{video_token}
  license_url: https://cr-license-proxy.prd.crunchyrollsvc.com/v1/license/widevine?specConform=true
  search_url: https://beta-api.crunchyroll.com/content/v2/discover/search?q={search_keyword}
|
||||||
289
services/CRAVE/__init__.py
Normal file
289
services/CRAVE/__init__.py
Normal file
@ -0,0 +1,289 @@
|
|||||||
|
import json
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
import click
|
||||||
|
import re
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
|
||||||
|
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
|
from devine.core.utils.collections import as_list
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.tracks.subtitle import Subtitle
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class CRAVE(Service):
|
||||||
|
"""
|
||||||
|
Service code for Bell Media's Crave streaming service (https://crave.ca).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Credentials
|
||||||
|
Security: UHD@-- HD@L3, doesn't care about releases.
|
||||||
|
|
||||||
|
TODO: Movies are not yet supported
|
||||||
|
NOTE: Devine accepts "def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> Optional[str]:" as default.
|
||||||
|
But we can also use the default "def configure(self)" method which is used in VT(Vinetrimmer) but just adding the others inside configure.ie like this,
|
||||||
|
"def config(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> Optional[str]:"
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ["CRAV", "crave"] # CRAV is unconfirmed but likely candidate, been in use for a few months
|
||||||
|
GEOFENCE = ["ca"]
|
||||||
|
TITLE_RE = r"^(?:https?://(?:www\.)?crave\.ca(?:/[a-z]{2})?/(?:movies|tv-shows)/)?(?P<id>[a-z0-9-]+)"
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="CRAVE", short_help="https://crave.ca")
@click.argument("title", type=str, required=False)
@click.pass_context
def cli(ctx: click.Context, **kwargs: Any) -> "CRAVE":
    """Click entry point: construct the CRAVE service from the CLI arguments."""
    return CRAVE(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title):
    """Initialize the service and resolve the title argument into self.title."""
    super().__init__(ctx)
    self.parse_title(ctx, title)

    # Requested video codec (e.g. H265) from the parent CLI options
    self.vcodec = ctx.parent.params["vcodec"]

    # Populated by authenticate()
    self.access_token = None
    self.credential = None
|
||||||
|
|
||||||
|
def authenticate(
    self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None
) -> Optional[str]:
    """
    Log in to the Bell Media account API and resolve the title's Axis ID.

    Returns the OAuth access token on success. Raises ValueError when the
    login response is not JSON or contains no access_token.
    """
    if cookies:
        self.session.cookies.update(cookies)
    if self.credential is None and credential:
        self.credential = credential

    # Static Basic-auth header identifying the client application
    headers = {"Authorization": self.config["headers"]["authorization"]}

    body = {
        "username": self.credential.username,
        "password": self.credential.password,
        "grant_type": "password",
    }

    r = self.session.post(
        "https://account.bellmedia.ca/api/login/v2.1",
        headers=headers,
        data=body,
    )

    self.log.info(" + Logging in")
    # NOTE(review): the Axis ID lookup runs before the login response is
    # validated; the GraphQL path-resolve call appears not to require the
    # access token — confirm this ordering is intentional.
    self.log.info(f"Fetching Axis title ID based on provided path: {self.title}")
    axis_id = self.get_axis_id(f"/tv-shows/{self.title}") or self.get_axis_id(f"/movies/{self.title}")
    self.title = axis_id
    self.log.info(f" + Obtained: {self.title}")

    try:
        response_data = r.json()
        self.access_token = response_data.get("access_token")
        if not self.access_token:
            raise ValueError(f"Login failed: {response_data}")
        return self.access_token
    except json.JSONDecodeError:
        raise ValueError(f"Failed to parse login response: {r.text}")
|
||||||
|
|
||||||
|
def get_titles(self):
    """
    Fetch the requested title's metadata from Crave's GraphQL API.

    Returns a Movies collection for MOVIE media types, otherwise a Series
    built from every EPISODE of every season.
    """
    # Fetch main title information (self.title is the Axis media ID here,
    # resolved earlier by authenticate()/get_axis_id())
    res = self.session.post(
        url="https://www.crave.ca/space-graphql/graphql",
        json={
            "operationName": "axisMedia",
            "variables": {"axisMediaId": self.title},
            "query": """
                query axisMedia($axisMediaId: ID!) {
                    contentData: axisMedia(id: $axisMediaId) {
                        id
                        axisId
                        title
                        originalSpokenLanguage
                        firstPlayableContent {
                            id
                            title
                            axisId
                            path
                            seasonNumber
                            episodeNumber
                        }
                        mediaType
                        firstAirYear
                        seasons {
                            title
                            id
                            seasonNumber
                        }
                    }
                }
            """,
        },
    ).json()

    # Ensure the response structure is valid before indexing into it
    if "data" not in res or "contentData" not in res["data"]:
        raise ValueError("Invalid response structure from Crave API")

    title_information = res["data"]["contentData"]

    # Handle movie titles: a single playable content entry
    if title_information["mediaType"] == "MOVIE":
        return Movies(
            [
                Movie(
                    id_=self.title,
                    service=self.__class__,
                    name=title_information["title"],
                    year=title_information.get("firstAirYear"),
                    language=title_information.get("originalSpokenLanguage"),
                    data=title_information["firstPlayableContent"],
                )
            ]
        )

    # Series: fetch the episode list of each season individually
    seasons = title_information.get("seasons", [])
    episodes = []
    for season in seasons:
        res = self.session.post(
            url="https://www.crave.ca/space-graphql/graphql",
            json={
                "operationName": "season",
                "variables": {"seasonId": season["id"]},
                "query": """
                    query season($seasonId: ID!) {
                        axisSeason(id: $seasonId) {
                            episodes {
                                axisId
                                title
                                contentType
                                seasonNumber
                                episodeNumber
                                axisPlaybackLanguages {
                                    language
                                }
                            }
                        }
                    }
                """,
            },
        ).json()

        # Skip seasons whose response lacks episode data
        if "data" in res and "axisSeason" in res["data"]:
            season_episodes = res["data"]["axisSeason"].get("episodes", [])
            # Only true EPISODE entries; excludes trailers/extras
            episodes.extend(
                Episode(
                    id_=episode.get("axisId"),
                    title=title_information["title"],
                    year=title_information.get("firstAirYear"),
                    season=episode.get("seasonNumber"),
                    number=episode.get("episodeNumber"),
                    name=episode.get("title"),
                    language=title_information.get("originalSpokenLanguage"),
                    service=self.__class__,
                    data=episode,
                )
                for episode in season_episodes
                if episode["contentType"] == "EPISODE"
            )

    return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """
    Resolve the title's content package and return its DASH tracks.

    Raises an Exception when the manifest endpoint returns a JSON error
    payload instead of an MPD document.
    """
    tracks = Tracks()
    # First content package ID for this Axis title
    package_id = self.session.get(
        url=self.config["endpoints"]["content_packages"].format(title_id=title.data["axisId"]),
        params={"$lang": "en"},
    ).json()["Items"][0]["Id"]

    mpd_url = self.config["endpoints"]["manifest"].format(title_id=title.data["axisId"], package_id=package_id)
    r = self.session.get(
        mpd_url,
        params={
            "jwt": self.access_token,
            # Filter 25 selects the H.265 variants, 24 the H.264 ones
            "filter": "25" if self.vcodec == "H265" else "24",
        },
    )
    # Inverted try/except/else on purpose: a successful JSON parse means the
    # server returned an error payload (an MPD is XML, not JSON), so the
    # `else` branch raises; the JSONDecodeError path carries the real MPD.
    try:
        mpd_data = r.json()
    except json.JSONDecodeError:
        mpd_data = r.text
    else:
        raise Exception(
            "Crave reported an error when obtaining the MPD Manifest.\n"
            + f"{mpd_data['Message']} ({mpd_data['ErrorCode']})"
        )

    tracks.add(DASH.from_text(mpd_data, url=mpd_url).to_tracks(title.language))

    # NOTE(review): SRT subtitle ingestion is disabled pending proper
    # language detection from the subtitle metadata.
    #tracks.add(
    #    Subtitle(
    #        id_=f"{title.data['axisId']}_{package_id}_sub",
    #        url=(
    #            f"{self.config['endpoints']['srt'].format(title_id=title.data['axisId'], package_id=package_id)}?"
    #            + urllib.parse.urlencode({"jwt": urllib.parse.quote_plus(self.access_token)})
    #        ),
    #        codec=Subtitle.Codec.SubRip,
    #        language=None,  # TODO: Extract proper language from subtitle metadata
    #        sdh=True,
    #    )
    #)

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title):
    """Crave exposes no chapter markers; always return an empty list."""
    return []
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_):
    """No service-specific certificate; devine falls back to the common privacy cert."""
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge, **_):
    """Forward the raw Widevine challenge to Crave's license server."""
    response = self.session.post(
        url=self.config["endpoints"]["license"],
        data=challenge,  # raw challenge bytes
    )
    return response.content
|
||||||
|
|
||||||
|
def get_axis_id(self, path):
    """
    Resolve a site path (e.g. "/tv-shows/slug") to its Axis content ID via
    GraphQL.

    Returns None when the path does not exist; raises ValueError on any
    other GraphQL error.
    """
    res = self.session.post(
        url="https://www.crave.ca/space-graphql/graphql",
        json={
            "operationName": "resolvePath",
            "variables": {"path": path},
            "query": """
                query resolvePath($path: String!) {
                    resolvedPath(path: $path) {
                        lastSegment {
                            content {
                                id
                            }
                        }
                    }
                }
            """,
        },
    ).json()
    if "errors" in res:
        # NOT_FOUND lets the caller try the other content-type prefix
        if res["errors"][0]["extensions"]["code"] == "NOT_FOUND":
            return None
        raise ValueError("Unknown error has occurred when trying to obtain the Axis ID for: " + path)
    return res["data"]["resolvedPath"]["lastSegment"]["content"]["id"]
|
||||||
|
|
||||||
|
def parse_title(self, ctx, title):
    """
    Resolve the title argument (CLI arg or parent-context param) and store
    the extracted ID on self.title.

    Returns the regex match groups when TITLE_RE matches, {} when no
    TITLE_RE is defined, and None when the input is used as-is.
    Raises ValueError when no title was supplied at all.
    """
    title = title or ctx.parent.params.get("title")
    if not title:
        self.log.error(" - No title ID specified")
        # Previously fell through and crashed later with a TypeError inside
        # re.search(..., None); fail fast with a clear error instead.
        raise ValueError("No title ID specified")
    # Use a default so services without TITLE_RE don't raise AttributeError
    if not getattr(self, "TITLE_RE", None):
        self.title = title
        return {}
    for regex in as_list(self.TITLE_RE):
        m = re.search(regex, title)
        if m:
            self.title = m.group("id")
            return m.groupdict()
    self.log.warning(f" - Unable to parse title ID {title!r}, using as-is")
    self.title = title
|
||||||
10
services/CRAVE/config.yaml
Normal file
10
services/CRAVE/config.yaml
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
endpoints:
|
||||||
|
login: 'https://account.bellmedia.ca/api/login/v2.1'
|
||||||
|
content_packages: 'https://capi.9c9media.com/destinations/crave_atexace/platforms/appletv/contents/{title_id}/contentpackages'
|
||||||
|
contents: 'https://capi.9c9media.com/destinations/crave_atexace/platforms/appletv/contents/{title_id}'
|
||||||
|
manifest: 'https://capi.9c9media.com/destinations/crave_atexace/platforms/appletv/bond/contents/{title_id}/contentpackages/{package_id}/manifest.mpd'
|
||||||
|
srt: 'https://capi.9c9media.com/destinations/crave_atexace/platforms/appletv/bond/contents/{title_id}/contentpackages/{package_id}/manifest.srt'
|
||||||
|
license: 'https://license.9c9media.ca/widevine'
|
||||||
|
|
||||||
|
headers:
|
||||||
|
authorization: 'Basic Y3JhdmUtYW5kcm9pZDpkZWZhdWx0' ## Used on android phones to log in
|
||||||
138
services/CRKL/__init__.py
Normal file
138
services/CRKL/__init__.py
Normal file
@ -0,0 +1,138 @@
|
|||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Optional, Union
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Movies, Movie, Titles_T, Title_T, Series, Episode
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.tracks import Chapters, Tracks, Chapter
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
|
||||||
|
class CRKL(Service):
    """
    Service code for Crackle

    Written by TPD94

    Authorization: None

    Security: FHD@L3
    """

    # Crackle is only available in the US
    GEOFENCE = ('US',)

    # Static method, this method belongs to the class
    @staticmethod
    # The command name; must match the service tag (and by extension the service folder)
    @click.command(name="CRKL", short_help="https://www.crackle.com", help=__doc__)
    # Content ID (movie or series) to download
    @click.argument("title", type=str)
    # Pass the context back to the CLI with arguments
    @click.pass_context
    def cli(ctx, **kwargs):
        return CRKL(ctx, **kwargs)

    # Accept the CLI arguments by overriding the constructor (the __init__() method)
    def __init__(self, ctx, title):

        # Store the title argument so it's accessible across all methods
        self.title = title

        # Initialize the base Service (session, config, logging, ...)
        super().__init__(ctx)

    def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
        """No account is needed; just install the browser-like headers Crackle expects."""
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Referer': 'https://www.crackle.com/',
            'Content-Type': 'application/json',
            'x-crackle-brand': 'crackle',
            'x-crackle-platform': '5FE67CCA-069A-42C6-A20F-4B47A8054D46',
            'x-crackle-region': 'us',
            'x-crackle-locale': 'en-us',
            'x-crackle-apiversion': 'v2.0.0',
        }

        self.session.headers.update(headers)

    def get_titles(self) -> Titles_T:
        """Return a Movies or Series collection depending on the content type."""
        video_type = self.session.get(url=f'https://prod-api.crackle.com/content/{self.title}').json()

        if video_type['data']['type'] == 'Movie':

            movie_metadata = video_type['data']['metadata'][0]

            # The playable asset is the first child of the movie content node
            movie_id = self.session.get(url=f'https://prod-api.crackle.com/content/{self.title}/children').json()['data'][0]['id']

            return Movies([Movie(
                id_=movie_id,
                service=self.__class__,
                name=movie_metadata['title'],
                year=None,
                language=None
            )])

        elif video_type['data']['type'] == 'Series':

            season_ids = []

            # Children of a series node are its seasons
            seasons_metadata = self.session.get(url=f'https://prod-api.crackle.com/content/{self.title}/children').json()

            for season in seasons_metadata['data']:
                season_ids.append(season['id'])

            # Children of each season node are its episodes
            episodes = []
            for season in season_ids:
                episodes_metadata = self.session.get(url=f'https://prod-api.crackle.com/content/{season}/children').json()

                for episode in episodes_metadata['data']:
                    episodes.append(Episode(
                        id_=episode['id'],
                        service=self.__class__,
                        title=video_type['data']['metadata'][0]['title'],
                        season=episode['seasonNumber'],
                        number=episode['episodeNumber'],
                        name=episode['title'],
                        year=None,
                        language=None
                    ))

            return Series(episodes)

    def get_tracks(self, title: Title_T) -> Tracks:
        """
        Locate the Widevine DASH stream for the title and return its tracks.

        NOTE(review): implicitly returns None when no dash-widevine stream is
        present in the playback response — confirm devine tolerates that.
        """
        title_metadata = self.session.get(url=f'https://prod-api.crackle.com/playback/vod/{title.id}').json()

        for manifest_url in title_metadata['data']['streams']:
            if manifest_url['type'] == 'dash-widevine' or manifest_url['type'] == 'dash_widevine':
                index_url = manifest_url['url']
                # Host portion up to and including ".com"; the index POST
                # returns a relative manifest path to append to it
                base_target_url = index_url[:index_url.find(".com")+len(".com")]
                mpd_url_metadata = self.session.post(url=index_url).json()['manifestUrl']
                final_url = base_target_url + mpd_url_metadata
                return DASH.from_url(final_url).to_tracks(language="en")

    def get_chapters(self, title: Title_T) -> Chapters:
        """
        Return a single chapter from the title's break-point metadata.

        NOTE(review): uses a fixed index and truncates smpteStart to HH:MM:SS;
        titles without that exact structure will raise KeyError/IndexError.
        """
        title_metadata = self.session.get(url=f'https://prod-api.crackle.com/content/{title.id}').json()

        return Chapters([Chapter(timestamp=title_metadata['data']['assets']['chapters'][1]['breakpoints'][0]['smpteStart'][:8])])

    def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
        """
        POST the challenge to Crackle's license server and return the license.

        NOTE(review): uses requests.post directly instead of self.session,
        bypassing any configured proxy/geofence handling — confirm intended.
        """
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
        }

        return requests.post(url='https://widevine-license.crackle.com/', data=challenge, headers=headers).content
|
||||||
374
services/CTV/__init__.py
Normal file
374
services/CTV/__init__.py
Normal file
@ -0,0 +1,374 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections.abc import Generator
|
||||||
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import click
|
||||||
|
from pywidevine.cdm import Cdm as WidevineCdm
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Subtitle, Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class CTV(Service):
|
||||||
|
"""
|
||||||
|
Service code for CTV.ca (https://www.ctv.ca)
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: Credentials for subscription, none for freely available titles
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L3: 1080p, DD5.1
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input can be either complete title/episode URL or just the path:
|
||||||
|
/shows/young-sheldon
|
||||||
|
/shows/young-sheldon/baptists-catholics-and-an-attempted-drowning-s7e6
|
||||||
|
/movies/war-for-the-planet-of-the-apes
|
||||||
|
"""
|
||||||
|
|
||||||
|
TITLE_RE = r"^(?:https?://(?:www\.)?ctv\.ca(?:/[a-z]{2})?)?/(?P<type>movies|shows)/(?P<id>[a-z0-9-]+)(?:/(?P<episode>[a-z0-9-]+))?$"
|
||||||
|
GEOFENCE = ("ca",)
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="CTV", short_help="https://www.ctv.ca", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    """Click entry point: construct the CTV service from the CLI arguments."""
    return CTV(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title):
    """Store the raw title input and cache frequently used endpoints."""
    # Raw title input (URL or path); parsed later in get_titles()
    self.title = title
    super().__init__(ctx)

    # Bearer token header value, set by authenticate() when credentials are used
    self.authorization: str = None

    # Frequently used endpoints from the service config
    self.api = self.config["endpoints"]["api"]
    self.license_url = self.config["endpoints"]["license"]
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """
    Acquire (or reuse/refresh) OAuth tokens for a CTV subscription account.

    Without credentials this is a no-op beyond the base-class behavior;
    freely available titles need no authorization.
    """
    super().authenticate(cookies, credential)
    if credential:
        cache = self.cache.get(f"tokens_{credential.sha1}")

        if cache and not cache.expired:
            # Cached and still valid — reuse
            self.log.info(" + Using cached Tokens...")
            tokens = cache.data
        elif cache and cache.expired:
            # Expired — exchange the cached refresh_token for new tokens
            self.log.info("Refreshing cached Tokens")
            r = self.session.post(
                self.config["endpoints"]["login"],
                headers={"authorization": f"Basic {self.config['endpoints']['auth']}"},
                data={
                    "grant_type": "refresh_token",
                    "username": credential.username,
                    "password": credential.password,
                    "refresh_token": cache.data["refresh_token"],
                },
            )
            try:
                res = r.json()
            except json.JSONDecodeError:
                raise ValueError(f"Failed to refresh tokens: {r.text}")

            tokens = res
            self.log.info(" + Refreshed")
        else:
            # No cache — fresh password login
            r = self.session.post(
                self.config["endpoints"]["login"],
                headers={"authorization": f"Basic {self.config['endpoints']['auth']}"},
                data={
                    "grant_type": "password",
                    "username": credential.username,
                    "password": credential.password,
                },
            )
            try:
                res = r.json()
            except json.JSONDecodeError:
                raise ValueError(f"Failed to log in: {r.text}")

            tokens = res
            self.log.info(" + Acquired tokens...")

        # NOTE(review): this also runs on the cached-and-valid path, which
        # re-sets the entry with a full expires_in window and so keeps
        # extending the cache TTL without refreshing — confirm intended.
        cache.set(tokens, expiration=tokens["expires_in"])

        self.authorization = f"Bearer {tokens['access_token']}"
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Query CTV's GraphQL search and yield matching titles."""
    payload = {
        "operationName": "searchMedia",
        "variables": {"title": f"{self.title}"},
        "query": """
        query searchMedia($title: String!) {searchMedia(titleMatches: $title) {
            ... on Medias {page {items {title\npath}}}}}, """,
    }

    response = self.session.post(self.config["endpoints"]["search"], json=payload)
    if response.status_code != 200:
        # Surface the raw error body and bail out of the generator
        self.log.error(response.text)
        return

    items = response.json()["data"]["searchMedia"]["page"]["items"]
    for item in items:
        yield SearchResult(
            id_=item.get("path"),
            title=item.get("title"),
            description=item.get("description"),
            # First path segment is the content kind (shows/movies)
            label=item["path"].split("/")[1],
            url="https://www.ctv.ca" + item.get("path"),
        )
|
||||||
|
|
||||||
|
def get_titles(self) -> Titles_T:
    """
    Parse the input URL/path and return the matching title collection:
    a single Episode, a full Series, or a Movie.

    NOTE(review): assumes TITLE_RE matches — re.match(...) returning None
    would raise AttributeError here; parse failure is not handled.
    """
    # Group order: "id" -> title slug, "type" -> shows/movies, "episode" -> optional episode slug
    title, kind, episode = (re.match(self.TITLE_RE, self.title).group(i) for i in ("id", "type", "episode"))
    title_path = self.get_title_id(kind, title, episode)

    # Single-episode input: return just that episode
    if episode is not None:
        data = self.get_episode_data(title_path)
        return Series(
            [
                Episode(
                    id_=data["axisId"],
                    service=self.__class__,
                    title=data["axisMedia"]["title"],
                    season=int(data["seasonNumber"]),
                    number=int(data["episodeNumber"]),
                    name=data["title"],
                    year=data.get("firstAirYear"),
                    language=data["axisPlaybackLanguages"][0].get("language", "en"),
                    # data carries the destination code needed by get_tracks()
                    data=data["axisPlaybackLanguages"][0]["destinationCode"],
                )
            ]
        )

    # Full series: enumerate every episode of every season
    if kind == "shows":
        data = self.get_series_data(title_path)
        titles = self.fetch_episodes(data["contentData"]["seasons"])
        return Series(
            [
                Episode(
                    id_=episode["axisId"],
                    service=self.__class__,
                    title=data["contentData"]["title"],
                    season=int(episode["seasonNumber"]),
                    number=int(episode["episodeNumber"]),
                    name=episode["title"],
                    year=data["contentData"]["firstAirYear"],
                    language=episode["axisPlaybackLanguages"][0].get("language", "en"),
                    data=episode["axisPlaybackLanguages"][0]["destinationCode"],
                )
                for episode in titles
            ]
        )

    if kind == "movies":
        data = self.get_movie_data(title_path)
        return Movies(
            [
                Movie(
                    id_=data["contentData"]["firstPlayableContent"]["axisId"],
                    service=self.__class__,
                    name=data["contentData"]["title"],
                    year=data["contentData"]["firstAirYear"],
                    language=data["contentData"]["firstPlayableContent"]["axisPlaybackLanguages"][0].get(
                        "language", "en"
                    ),
                    data=data["contentData"]["firstPlayableContent"]["axisPlaybackLanguages"][0]["destinationCode"],
                )
            ]
        )
|
||||||
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
|
||||||
|
content = "https://capi.9c9media.com/destinations/{}/platforms/desktop/contents/{}/contentPackages".format(
|
||||||
|
title.data, title.id
|
||||||
|
)
|
||||||
|
|
||||||
|
params = {
|
||||||
|
"$include": "[Desc,Constraints,EndCreditOffset,Breaks,Stacks.ManifestHost.mpd]",
|
||||||
|
}
|
||||||
|
r = self.session.get(content, params=params)
|
||||||
|
r.raise_for_status()
|
||||||
|
|
||||||
|
pkg_id = r.json()["Items"][0]["Id"]
|
||||||
|
manifest = f"{content}/{pkg_id}/manifest.mpd"
|
||||||
|
subtitle = f"{content}/{pkg_id}/manifest.vtt"
|
||||||
|
|
||||||
|
if self.authorization:
|
||||||
|
self.session.headers.update({"authorization": self.authorization})
|
||||||
|
|
||||||
|
tracks = Tracks()
|
||||||
|
for num in ["14", "3", "25", "fe&mca=true&mta=true"]:
|
||||||
|
version = DASH.from_url(url=f"{manifest}?filter={num}", session=self.session).to_tracks(
|
||||||
|
language=title.language
|
||||||
|
)
|
||||||
|
tracks.videos.extend(version.videos)
|
||||||
|
tracks.audio.extend(version.audio)
|
||||||
|
|
||||||
|
tracks.add(
|
||||||
|
Subtitle(
|
||||||
|
id_=hashlib.md5(subtitle.encode()).hexdigest()[0:6],
|
||||||
|
url=subtitle,
|
||||||
|
codec=Subtitle.Codec.from_mime(subtitle[-3:]),
|
||||||
|
language=title.language,
|
||||||
|
is_original_lang=True,
|
||||||
|
forced=False,
|
||||||
|
sdh=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Title_T) -> list[Chapter]:
|
||||||
|
return [] # Chapters not available
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
|
||||||
|
return WidevineCdm.common_privacy_cert
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
|
||||||
|
r = self.session.post(url=self.license_url, data=challenge)
|
||||||
|
if r.status_code != 200:
|
||||||
|
self.log.error(r.text)
|
||||||
|
sys.exit(1)
|
||||||
|
return r.content
|
||||||
|
|
||||||
|
# service specific functions
|
||||||
|
|
||||||
|
def get_title_id(self, kind: str, title: tuple, episode: str) -> str:
|
||||||
|
if episode is not None:
|
||||||
|
title += f"/{episode}"
|
||||||
|
payload = {
|
||||||
|
"operationName": "resolvePath",
|
||||||
|
"variables": {"path": f"{kind}/{title}"},
|
||||||
|
"query": """
|
||||||
|
query resolvePath($path: String!) {
|
||||||
|
resolvedPath(path: $path) {
|
||||||
|
lastSegment {
|
||||||
|
content {
|
||||||
|
id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
}
|
||||||
|
r = self.session.post(self.api, json=payload).json()
|
||||||
|
return r["data"]["resolvedPath"]["lastSegment"]["content"]["id"]
|
||||||
|
|
||||||
|
def get_series_data(self, title_id: str) -> json:
|
||||||
|
payload = {
|
||||||
|
"operationName": "axisMedia",
|
||||||
|
"variables": {"axisMediaId": f"{title_id}"},
|
||||||
|
"query": """
|
||||||
|
query axisMedia($axisMediaId: ID!) {
|
||||||
|
contentData: axisMedia(id: $axisMediaId) {
|
||||||
|
title
|
||||||
|
description
|
||||||
|
originalSpokenLanguage
|
||||||
|
mediaType
|
||||||
|
firstAirYear
|
||||||
|
seasons {
|
||||||
|
title
|
||||||
|
id
|
||||||
|
seasonNumber
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
}
|
||||||
|
|
||||||
|
return self.session.post(self.api, json=payload).json()["data"]
|
||||||
|
|
||||||
|
def get_movie_data(self, title_id: str) -> json:
|
||||||
|
payload = {
|
||||||
|
"operationName": "axisMedia",
|
||||||
|
"variables": {"axisMediaId": f"{title_id}"},
|
||||||
|
"query": """
|
||||||
|
query axisMedia($axisMediaId: ID!) {
|
||||||
|
contentData: axisMedia(id: $axisMediaId) {
|
||||||
|
title
|
||||||
|
description
|
||||||
|
firstAirYear
|
||||||
|
firstPlayableContent {
|
||||||
|
axisId
|
||||||
|
axisPlaybackLanguages {
|
||||||
|
destinationCode
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
}
|
||||||
|
|
||||||
|
return self.session.post(self.api, json=payload).json()["data"]
|
||||||
|
|
||||||
|
def get_episode_data(self, title_path: str) -> json:
|
||||||
|
payload = {
|
||||||
|
"operationName": "axisContent",
|
||||||
|
"variables": {"id": f"{title_path}"},
|
||||||
|
"query": """
|
||||||
|
query axisContent($id: ID!) {
|
||||||
|
axisContent(id: $id) {
|
||||||
|
axisId
|
||||||
|
title
|
||||||
|
description
|
||||||
|
contentType
|
||||||
|
seasonNumber
|
||||||
|
episodeNumber
|
||||||
|
axisMedia {
|
||||||
|
title
|
||||||
|
}
|
||||||
|
axisPlaybackLanguages {
|
||||||
|
language
|
||||||
|
destinationCode
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
}
|
||||||
|
return self.session.post(self.api, json=payload).json()["data"]["axisContent"]
|
||||||
|
|
||||||
|
def fetch_episode(self, episode: str) -> json:
|
||||||
|
payload = {
|
||||||
|
"operationName": "season",
|
||||||
|
"variables": {"seasonId": f"{episode}"},
|
||||||
|
"query": """
|
||||||
|
query season($seasonId: ID!) {
|
||||||
|
axisSeason(id: $seasonId) {
|
||||||
|
episodes {
|
||||||
|
axisId
|
||||||
|
title
|
||||||
|
description
|
||||||
|
contentType
|
||||||
|
seasonNumber
|
||||||
|
episodeNumber
|
||||||
|
axisPlaybackLanguages {
|
||||||
|
language
|
||||||
|
destinationCode
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
""",
|
||||||
|
}
|
||||||
|
response = self.session.post(self.api, json=payload)
|
||||||
|
return response.json()["data"]["axisSeason"]["episodes"]
|
||||||
|
|
||||||
|
    def fetch_episodes(self, data: dict) -> list:
        """TODO: Switch to async once https proxies are fully supported"""
        # Fetch every season concurrently, then flatten the per-season episode
        # lists into one flat list of episode dicts.
        # NOTE(review): ThreadPoolExecutor/as_completed must be imported from
        # concurrent.futures at the top of this file — not visible in this
        # chunk, confirm.
        with ThreadPoolExecutor(max_workers=10) as executor:
            tasks = [executor.submit(self.fetch_episode, x["id"]) for x in data]
            # as_completed yields in completion order, so episode order is not
            # guaranteed here; callers appear to sort/number via metadata.
            titles = [future.result() for future in as_completed(tasks)]
        return [episode for episodes in titles for episode in episodes]
|
||||||
6
services/CTV/config.yaml
Normal file
6
services/CTV/config.yaml
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
endpoints:
|
||||||
|
login: https://account.bellmedia.ca/api/login/v2.1
|
||||||
|
auth: Y3R2LXdlYjpkZWZhdWx0
|
||||||
|
api: https://api.ctv.ca/space-graphql/graphql
|
||||||
|
license: https://license.9c9media.ca/widevine
|
||||||
|
search: https://www.ctv.ca/space-graphql/apq/graphql
|
||||||
236
services/DROP/__init__.py
Normal file
236
services/DROP/__init__.py
Normal file
@ -0,0 +1,236 @@
|
|||||||
|
import re
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import click
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
from typing import Optional, Union
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Series
|
||||||
|
from devine.core.tracks import Tracks
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import HLS
|
||||||
|
from devine.core.tracks.attachment import Attachment
|
||||||
|
|
||||||
|
|
||||||
|
class DROP(Service):
    """
    Service code for DROPOUT.tv
    Author: @sp4rk.y

    Authorization: Cookies or Credentials
    Security: None
    """

    # URL patterns for the three ways a title can be addressed on dropout.tv.
    TITLE_RE = r"^(?:https?://(?:www\.)?dropout\.tv/)([^/]+)(?:/.*)?$"
    SERIES_RE = r"https?://(?:www\.)?dropout\.tv/([^/]+)(?:/season:(\d+))?/?$"
    EPISODE_RE = r"https?://(?:www\.)?dropout\.tv/([^/]+)/season:(\d+)/videos/([^/]+)/?$"

    @staticmethod
    @click.command(name="DROP", short_help="https://www.dropout.tv", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx, **kwargs):
        return DROP(ctx, **kwargs)

    def __init__(self, ctx, title: str):
        self.title = title
        super().__init__(ctx)

    def authenticate(
        self,
        cookies: Optional[CookieJar] = None,
        credential: Optional[Credential] = None,
    ) -> None:
        """Log in with cookies when available, otherwise with email/password credentials."""
        self.credentials = credential

        if cookies:
            self.session.cookies.update(cookies)
        elif self.credentials:
            login_data = {
                "email": self.credentials.username,
                "password": self.credentials.password,
                "authenticity_token": self._get_authenticity_token(),
                "utf8": "true",
            }

            # Use the URL from the config
            response = self.session.post(
                self.config["endpoints"]["login_url"],
                data=login_data,
                allow_redirects=False,
            )

            # NOTE(review): success is inferred from the absence of the
            # "watch-unauthorized" marker in the response body — confirm this
            # still matches the site's login flow.
            if '<div id="watch-unauthorized"' in response.text:
                self.log.error("Login failed")
                raise Exception("Login failed")
            else:
                self.log.info("Login successful")
        else:
            self.log.info("No login credentials provided, proceeding without authentication")

    def _get_authenticity_token(self):
        """Scrape the CSRF authenticity token from the sign-in page."""
        signin_page = self.session.get(self.config["endpoints"]["login_url"]).text
        match = re.search(r'name="authenticity_token" value="(.+?)"', signin_page)
        if match:
            return match.group(1)
        else:
            self.log.error("Could not find authenticity token")
            raise ValueError("Authenticity token not found")

    def get_titles(self) -> Series:
        """
        Scrape every season page of the show and return all episodes as a Series.

        (Fixed: return annotation was the single-member `Union[Series]`.)
        """
        match = re.match(self.SERIES_RE, self.title)
        if match:
            title_id = match.group(1)
        else:
            # Input was a bare slug rather than a URL.
            title_id = self.title

        base_url = self.config["endpoints"]["episode_metadata_url"].format(title_id=title_id)

        episodes = []
        season_urls = []

        response = self.session.get(base_url)
        soup = BeautifulSoup(response.text, "html.parser")

        # One URL per season; shows without a season picker have a single page.
        season_select = soup.find("select", class_="js-switch-season")
        if season_select:
            for option in season_select.find_all("option"):
                season_url = urljoin(response.url, option["value"])
                season_urls.append(season_url)
        else:
            season_urls.append(base_url)

        for season_url in season_urls:
            # Request the ajax listing so all episodes come back in one page.
            params = {
                "page": 1,
                "per_page": 1000,
                "html": "1",
                "ajax": "1",
            }
            season_response = self.session.get(season_url, params=params)
            season_soup = BeautifulSoup(season_response.text, "html.parser")

            season_number_match = re.search(r"/season:(\d+)", season_url)
            if season_number_match:
                season_number = int(season_number_match.group(1))
            else:
                season_number = None

            items = season_soup.find_all("div", class_="browse-item-card")
            if not items:
                continue

            for item in items:
                episode_link = item.find("a", class_="browse-item-link")
                if episode_link:
                    episode_url = episode_link["href"]
                    # Episode ID and display label live in a JSON tracking attribute.
                    episode_data_json = episode_link.get("data-track-event-properties")
                    if episode_data_json:
                        episode_data = json.loads(episode_data_json)
                    else:
                        continue

                    episode_id = episode_data.get("id")
                    episode_title = episode_data.get("label")

                    episode_number_elem = item.find("span", class_="media-identifier media-episode")
                    if episode_number_elem:
                        episode_number_text = episode_number_elem.text.strip()
                        episode_number_match = re.search(r"Episode (\d+)", episode_number_text)
                        if episode_number_match:
                            episode_number = int(episode_number_match.group(1))
                        else:
                            continue
                    else:
                        continue

                    # Derive a human-readable show title from the slug.
                    show_title = self.title.split("/")[-1].replace("-", " ").title()

                    episode = Episode(
                        id_=str(episode_id),
                        service=self.__class__,
                        title=show_title,
                        season=season_number,
                        number=episode_number,
                        name=episode_title,
                        year=None,
                        data={"url": episode_url},
                    )
                    episodes.append(episode)

        return Series(episodes)

    def get_tracks(self, title: Episode) -> Tracks:
        """
        Resolve the Vimeo OTT embed for the episode and return its tracks plus
        a thumbnail attachment.

        (Fixed: removed a dead `tracks = Tracks()` assignment that was
        immediately overwritten, and the no-op `.replace("&", "&")` which was
        evidently meant to unescape HTML-encoded ampersands.)
        """
        episode_url = title.data["url"]
        episode_page = self.session.get(episode_url).text

        embed_url_match = re.search(self.config["endpoints"]["embed_url_regex"], episode_page)
        if not embed_url_match:
            raise ValueError("Could not find embed_url in the episode page")
        # The embed URL is HTML-escaped in the page source.
        embed_url = embed_url_match.group(1).replace("&amp;", "&")

        headers = {k: v.format(episode_url=episode_url) for k, v in self.config["headers"].items()}

        # Fetch the embed page content
        embed_page = self.session.get(embed_url, headers=headers).text

        # Extract the config URL using regex
        config_url_match = re.search(self.config["endpoints"]["config_url_regex"], embed_page)
        if config_url_match:
            # The config URL is JSON-escaped (\u0026) in the embed page.
            config_url = config_url_match.group(1).replace("\\u0026", "&")
        else:
            raise ValueError("Config URL not found on the embed page.")

        config_data = self.session.get(config_url, headers=headers).json()

        # Retrieve the CDN information from the config data
        cdns = config_data["request"]["files"]["hls"]["cdns"]
        default_cdn = config_data["request"]["files"]["hls"]["default_cdn"]

        # Select the default CDN or fall back to the first available one
        cdn = cdns.get(default_cdn) or next(iter(cdns.values()))

        # Generate the MPD URL by replacing 'playlist.json' with 'playlist.mpd'
        # NOTE(review): the URL is rewritten to .mpd but parsed with the HLS
        # manifest class — confirm which format the CDN actually serves.
        mpd_url = cdn["avc_url"].replace("playlist.json", "playlist.mpd")

        tracks = HLS.from_url(url=mpd_url).to_tracks(language="en")

        # Extract thumbnail URL from config_data
        thumbnail_url = config_data['video']['thumbnail_url']
        thumbnail_response = self.session.get(thumbnail_url)
        if thumbnail_response.status_code == 200:
            thumbnail_filename = f"{title.id}_thumbnail.jpg"
            thumbnail_path = config.directories.temp / thumbnail_filename

            # Ensure the directory exists
            os.makedirs(config.directories.temp, exist_ok=True)

            # Save the thumbnail file
            with open(thumbnail_path, "wb") as f:
                f.write(thumbnail_response.content)

            # Create an Attachment object
            thumbnail_attachment = Attachment(
                path=thumbnail_path,
                name=thumbnail_filename,
                mime_type="image/jpeg",
                description="Thumbnail",
            )

            # Add the attachment to the tracks
            tracks.attachments.append(thumbnail_attachment)

        return tracks

    def get_chapters(self, title):
        """Return chapter markers; the service exposes none."""
        return []

    def get_widevine_license(self, challenge: bytes, title: Episode, track):
        """Content is unencrypted; no license is ever requested."""
        # No DRM
        pass
|
||||||
15
services/DROP/config.yaml
Normal file
15
services/DROP/config.yaml
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
endpoints:
|
||||||
|
login_url: "https://www.dropout.tv/login"
|
||||||
|
episode_metadata_url: "https://www.dropout.tv/{title_id}"
|
||||||
|
embed_url_regex: 'embed_url:\s*"([^"]+)"'
|
||||||
|
config_url_regex: 'config_url":"([^"]+)"'
|
||||||
|
|
||||||
|
headers:
|
||||||
|
referer: "{episode_url}"
|
||||||
|
user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
||||||
|
accept: "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"
|
||||||
|
accept_language: "en-US,en;q=0.5"
|
||||||
|
upgrade_insecure_requests: "1"
|
||||||
|
sec_fetch_dest: "iframe"
|
||||||
|
sec_fetch_mode: "navigate"
|
||||||
|
sec_fetch_site: "cross-site"
|
||||||
300
services/DSCP/__init__.py
Normal file
300
services/DSCP/__init__.py
Normal file
@ -0,0 +1,300 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import uuid
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH, HLS
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapters, Tracks
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
|
||||||
|
class DSCP(Service):
    """
    \b
    Service code for Discovery Plus (https://discoveryplus.com).

    \b
    Author: stabbedbybrick
    Authorization: Cookies
    Robustness:
      Widevine:
        L3: 2160p, AAC2.0
      ClearKey:
        AES-128: 1080p, AAC2.0

    \b
    Tips:
        - Input can be either complete title URL or just the path:
          SHOW: /show/richard-hammonds-workshop
          EPISODE: /video/richard-hammonds-workshop/new-beginnings
          SPORT: /video/sport/tnt-sports-1/uefa-champions-league
        - Use the --lang LANG_RANGE option to request non-english tracks
        - use -v H.265 to request H.265 UHD tracks (if available)

    \b
    Notes:
        - Using '-v H.265' will request DASH manifest even if no H.265 tracks are available.
          This can be useful if HLS is not available for some reason.

    """

    ALIASES = ("dplus", "discoveryplus", "discovery+")
    TITLE_RE = r"^(?:https?://(?:www\.)?discoveryplus\.com(?:/[a-z]{2})?)?/(?P<type>show|video)/(?P<id>[a-z0-9-/]+)"

    @staticmethod
    @click.command(name="DSCP", short_help="https://discoveryplus.com", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> DSCP:
        return DSCP(ctx, **kwargs)

    def __init__(self, ctx: Context, title: str):
        self.title = title
        self.vcodec = ctx.parent.params.get("vcodec")
        super().__init__(ctx)

    def authenticate(
        self,
        cookies: Optional[CookieJar] = None,
        credential: Optional[Credential] = None,
    ) -> None:
        """Require cookies, then bootstrap the regional API base URL and user info."""
        super().authenticate(cookies, credential)
        if not cookies:
            raise EnvironmentError("Service requires Cookies for Authentication.")

        self.session.cookies.update(cookies)

        # The bootstrap endpoint tells us which regional API host to use.
        self.base_url = None
        info = self._request("GET", "https://global-prod.disco-api.com/bootstrapInfo")
        self.base_url = info["data"]["attributes"].get("baseApiUrl")

        user = self._request("GET", "/users/me")
        self.territory = user["data"]["attributes"]["currentLocationTerritory"]
        self.user_language = user["data"]["attributes"]["clientTranslationLanguageTags"][0]
        self.site_id = user["meta"]["site"]["id"]

    def search(self) -> Generator[SearchResult, None, None]:
        """Yield show results for the raw title string used as a search query."""
        params = {
            "include": "default",
            "decorators": "viewingHistory,isFavorite,playbackAllowed,contentAction,badges",
            "contentFilter[query]": self.title,
            "page[items.number]": "1",
            "page[items.size]": "8",
        }
        data = self._request("GET", "/cms/routes/search/result", params=params)

        results = [x.get("attributes") for x in data["included"] if x.get("type") == "show"]

        for result in results:
            yield SearchResult(
                id_=f"/show/{result.get('alternateId')}",
                title=result.get("name"),
                description=result.get("description"),
                label="show",
                url=f"/show/{result.get('alternateId')}",
            )

    def get_titles(self) -> Union[Movies, Series]:
        """Parse the input as a /show/ or /video/ path and return its episodes."""
        try:
            kind, content_id = (re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "id"))
        except Exception:
            raise ValueError("Could not parse ID from title - is the URL correct?")

        # TITLE_RE only matches "show" or "video", so exactly one branch runs.
        if kind == "video":
            episodes = self._episode(content_id)

        if kind == "show":
            episodes = self._show(content_id)

        return Series(episodes)

    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """Request playback info and parse the returned HLS or DASH manifest."""
        payload = {
            "videoId": title.id,
            "deviceInfo": {
                "adBlocker": "false",
                "drmSupported": "false",
                "hwDecodingCapabilities": ["H264", "H265"],
                "screen": {"width": 3840, "height": 2160},
                "player": {"width": 3840, "height": 2160},
            },
            "wisteriaProperties": {
                "product": "dplus_emea",
                "sessionId": str(uuid.uuid1()),
            },
        }

        # Pretending to be a Fire TV unlocks H.265 (and forces a DASH manifest).
        if self.vcodec == "H.265":
            payload["wisteriaProperties"]["device"] = {
                "browser": {"name": "chrome", "version": "96.0.4664.55"},
                "type": "firetv",
            }
            payload["wisteriaProperties"]["platform"] = "firetv"

        res = self._request("POST", "/playback/v3/videoPlaybackInfo", payload=payload)

        streaming = res["data"]["attributes"]["streaming"][0]
        streaming_type = streaming["type"].strip().lower()
        manifest = streaming["url"]

        # Stash DRM info for get_widevine_license; None means unencrypted.
        self.token = None
        self.license = None
        if streaming["protection"]["drmEnabled"]:
            self.token = streaming["protection"]["drmToken"]
            self.license = streaming["protection"]["schemes"]["widevine"]["licenseUrl"]

        if streaming_type == "hls":
            tracks = HLS.from_url(url=manifest, session=self.session).to_tracks(language=title.language)

        elif streaming_type == "dash":
            tracks = DASH.from_url(url=manifest, session=self.session).to_tracks(language=title.language)

        else:
            raise ValueError(f"Unknown streaming type: {streaming_type}")

        return tracks

    def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
        """Return chapter markers; the service exposes none."""
        return Chapters()

    def get_widevine_service_certificate(self, **_: Any) -> str:
        return None

    def get_widevine_license(self, challenge: bytes, **_: Any) -> str:
        """POST the challenge with the pre-auth token; None when content is unencrypted."""
        if not self.license:
            return None

        r = self.session.post(self.license, headers={"Preauthorization": self.token}, data=challenge)
        if not r.ok:
            raise ConnectionError(r.text)

        return r.content

    # Service specific functions

    def _show(self, title: str) -> list[Episode]:
        """
        Return every EPISODE of a show by walking each season collection.

        (Fixed: return annotation said `Episode` but a list is returned, and
        `year` is now cast to int to match `_episode`.)
        """
        params = {
            "include": "default",
            "decorators": "playbackAllowed,contentAction,badges",
        }
        data = self._request("GET", "/cms/routes/show/{}".format(title), params=params)

        content = next(x for x in data["included"] if x["attributes"].get("alias") == "generic-show-episodes")
        content_id = content["id"]
        show_id = content["attributes"]["component"]["mandatoryParams"]
        season_params = [x.get("parameter") for x in content["attributes"]["component"]["filters"][0]["options"]]
        page = next(x for x in data["included"] if x.get("type", "") == "page")

        # One request per season filter option.
        seasons = [
            self._request(
                "GET", "/cms/collections/{}?{}&{}".format(content_id, season, show_id),
                params={"include": "default", "decorators": "playbackAllowed,contentAction,badges"},
            )
            for season in season_params
        ]

        videos = [[x for x in season["included"] if x["type"] == "video"] for season in seasons]

        return [
            Episode(
                id_=ep["id"],
                service=self.__class__,
                title=page["attributes"]["title"],
                year=int(ep["attributes"]["airDate"][:4]) if ep["attributes"].get("airDate") else None,
                season=ep["attributes"].get("seasonNumber"),
                number=ep["attributes"].get("episodeNumber"),
                name=ep["attributes"]["name"],
                language=ep["attributes"]["audioTracks"][0]
                if ep["attributes"].get("audioTracks")
                else self.user_language,
                data=ep,
            )
            for episodes in videos
            for ep in episodes
            if ep["attributes"]["videoType"] == "EPISODE"
        ]

    def _episode(self, title: str) -> list[Episode]:
        """
        Return a single episode (as a one-element list) from a /video/ path.

        (Fixed: return annotation said `Episode` but a list is returned.)
        """
        params = {
            "include": "default",
            "decorators": "playbackAllowed,contentAction,badges",
        }
        data = self._request("GET", "/cms/routes/video/{}".format(title), params=params)
        page = next((x for x in data["included"] if x.get("type", "") == "page"), None)
        if not page:
            raise IndexError("Episode page not found")

        video_id = page["relationships"].get("primaryContent", {}).get("data", {}).get("id")
        if not video_id:
            raise IndexError("Episode id not found")

        params = {"decorators": "isFavorite", "include": "primaryChannel"}
        content = self._request("GET", "/content/videos/{}".format(video_id), params=params)
        episode = content["data"]["attributes"]
        name = episode.get("name")
        if episode.get("secondaryTitle"):
            name += f" {episode.get('secondaryTitle')}"

        return [
            Episode(
                id_=content["data"].get("id"),
                service=self.__class__,
                title=page["attributes"]["title"],
                year=int(episode.get("airDate")[:4]) if episode.get("airDate") else None,
                season=episode.get("seasonNumber") or 0,
                number=episode.get("episodeNumber") or 0,
                name=name,
                language=episode["audioTracks"][0] if episode.get("audioTracks") else self.user_language,
                data=episode,
            )
        ]

    def _request(
        self,
        method: str,
        api: str,
        params: dict = None,
        headers: dict = None,
        payload: dict = None,
    ) -> dict:
        """
        Send a request relative to the bootstrap base URL and return the
        decoded JSON body, translating known API error codes into log messages.

        (Fixed: `params` were previously merged into `self.session.params`,
        silently leaking query parameters into every later request; they are
        now passed per-request. The return annotation `Any[dict | str]` was
        invalid typing syntax.)
        """
        url = urljoin(self.base_url, api)
        self.session.headers.update(self.config["headers"])

        if headers:
            self.session.headers.update(headers)

        prep = self.session.prepare_request(Request(method, url, params=params, json=payload))
        response = self.session.send(prep)

        try:
            data = json.loads(response.content)
        except ValueError as e:
            raise ConnectionError("Request failed: {}".format(e))

        if data.get("errors"):
            if "invalid.token" in data["errors"][0]["code"]:
                self.log.error("- Invalid Token. Cookies are invalid or may have expired.")
                sys.exit(1)

            if "missingpackage" in data["errors"][0]["code"]:
                self.log.error("- Access Denied. Title is not available for this subscription.")
                sys.exit(1)

            raise ConnectionError(data["errors"])

        return data
|
||||||
4
services/DSCP/config.yaml
Normal file
4
services/DSCP/config.yaml
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
headers:
|
||||||
|
user-agent: Chrome/96.0.4664.55
|
||||||
|
x-disco-client: WEB:UNKNOWN:dplus_us:2.44.4
|
||||||
|
x-disco-params: realm=go,siteLookupKey=dplus_us,bid=dplus,hn=www.discoveryplus.com,hth=,uat=false
|
||||||
630
services/DSNP/__init__.py
Normal file
630
services/DSNP/__init__.py
Normal file
@ -0,0 +1,630 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import uuid
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import HLS
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapters, Tracks, Video, Chapter
|
||||||
|
from devine.core.utils.collections import as_list
|
||||||
|
|
||||||
|
from . import queries
|
||||||
|
|
||||||
|
|
||||||
|
class DSNP(Service):
|
||||||
|
"""
|
||||||
|
\b
|
||||||
|
Service code for DisneyPlus streaming service (https://www.disneyplus.com).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Credentials
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L1: 2160p, 1080p
|
||||||
|
L3: 720p
|
||||||
|
PlayReady:
|
||||||
|
SL3: 2160p, 1080p
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input should be only the entity ID for both series and movies:
|
||||||
|
MOVIE: entity-99e15d53-926e-4074-b9f4-6524d10c8bed
|
||||||
|
SERIES: entity-30429ad6-dd12-41bf-924e-19131fa66bb5
|
||||||
|
- Use the --lang LANG_RANGE option to request non-english tracks
|
||||||
|
- CDM level dictates playback quality (L3 == 720p, L1 == 1080p, 2160p)
|
||||||
|
|
||||||
|
\b
|
||||||
|
Notes:
|
||||||
|
- On first run, the program will look for the first account profile that doesn't
|
||||||
|
have kids mode or pin protection enabled. If none are found, the program will exit.
|
||||||
|
- The profile will be cached and re-used until cache is cleared.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@click.command(name="DSNP", short_help="https://www.disneyplus.com", help=__doc__)
|
||||||
|
@click.argument("title", type=str)
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx: Context, **kwargs: Any) -> DSNP:
|
||||||
|
return DSNP(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title: str):
|
||||||
|
self.title = title
|
||||||
|
super().__init__(ctx)
|
||||||
|
self.cdm = ctx.obj.cdm
|
||||||
|
self.playback_data = {}
|
||||||
|
|
||||||
|
vcodec = ctx.parent.params.get("vcodec")
|
||||||
|
range = ctx.parent.params.get("range_")
|
||||||
|
|
||||||
|
self.range = range[0].name if range else "SDR"
|
||||||
|
self.vcodec = "H265" if vcodec and vcodec == Video.Codec.HEVC else "H264"
|
||||||
|
if self.range != "SDR" and self.vcodec != "H265":
|
||||||
|
self.vcodec = "H265"
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
|
||||||
|
super().authenticate(cookies, credential)
|
||||||
|
if not credential:
|
||||||
|
raise EnvironmentError("Service requires Credentials for Authentication.")
|
||||||
|
|
||||||
|
self.session.headers.update(self.config["HEADERS"])
|
||||||
|
self.session.headers.update({"x-bamsdk-transaction-id": str(uuid.uuid4())})
|
||||||
|
self.prd_config = self.session.get(self.config["CONFIG_URL"]).json()
|
||||||
|
|
||||||
|
self._cache = self.cache.get(f"tokens_{credential.sha1}")
|
||||||
|
if self._cache:
|
||||||
|
self.log.info(" + Refreshing Tokens")
|
||||||
|
profile = self.refresh_token(self._cache.data["token"]["refreshToken"])
|
||||||
|
self._cache.set(profile, expiration=profile["token"]["expiresIn"] - 30)
|
||||||
|
token = self._cache.data["token"]["accessToken"]
|
||||||
|
self.session.headers.update({"Authorization": "Bearer {}".format(token)})
|
||||||
|
self.active_session = self.account()["activeSession"]
|
||||||
|
else:
|
||||||
|
self.log.info(" + Setting up new profile...")
|
||||||
|
token = self.register_device()
|
||||||
|
status = self.check_email(credential.username, token)
|
||||||
|
if status.lower() == "register":
|
||||||
|
raise ValueError("Account is not registered. Please register first.")
|
||||||
|
elif status.lower() == "otp":
|
||||||
|
self.log.error(" - Account requires passcode for login.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
else:
|
||||||
|
tokens = self.login(credential.username, credential.password, token)
|
||||||
|
self.session.headers.update({"Authorization": "Bearer {}".format(tokens["accessToken"])})
|
||||||
|
account = self.account()
|
||||||
|
profile_id = next(
|
||||||
|
(
|
||||||
|
x.get("id")
|
||||||
|
for x in account["account"]["profiles"]
|
||||||
|
if not x["attributes"]["kidsModeEnabled"]
|
||||||
|
and not x["attributes"]["parentalControls"]["isPinProtected"]
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
if not profile_id:
|
||||||
|
self.log.error(
|
||||||
|
" - Missing profile - you need at least one profile with kids mode and pin protection disabled"
|
||||||
|
)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
set_profile = self.switch_profile(profile_id)
|
||||||
|
profile = self.refresh_token(set_profile["token"]["refreshToken"])
|
||||||
|
self._cache.set(profile, expiration=profile["token"]["expiresIn"] - 30)
|
||||||
|
token = self._cache.data["token"]["accessToken"]
|
||||||
|
self.session.headers.update({"Authorization": "Bearer {}".format(token)})
|
||||||
|
self.active_session = self.account()["activeSession"]
|
||||||
|
|
||||||
|
self.log.info(" + Acquired tokens...")
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
|
||||||
|
params = {
|
||||||
|
"query": self.title,
|
||||||
|
}
|
||||||
|
endpoint = self.href(
|
||||||
|
self.prd_config["services"]["explore"]["client"]["endpoints"]["search"]["href"],
|
||||||
|
version=self.config["EXPLORE_VERSION"],
|
||||||
|
)
|
||||||
|
data = self._request("GET", endpoint, params=params)["data"]["page"]
|
||||||
|
if not data.get("containers"):
|
||||||
|
return
|
||||||
|
|
||||||
|
results = data["containers"][0]["items"]
|
||||||
|
|
||||||
|
for result in results:
|
||||||
|
entity = "entity-" + result.get("id")
|
||||||
|
yield SearchResult(
|
||||||
|
id_=entity,
|
||||||
|
title=result["visuals"].get("title"),
|
||||||
|
description=result["visuals"]["description"].get("brief"),
|
||||||
|
label=result["visuals"]["metastringParts"].get("releaseYearRange", {}).get("startYear"),
|
||||||
|
url=f"https://www.disneyplus.com/browse/{entity}",
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
|
||||||
|
if not self.title.startswith("entity"):
|
||||||
|
raise ValueError("Invalid input - Use only entity IDs.")
|
||||||
|
|
||||||
|
content = self.get_deeplink(self.title)
|
||||||
|
_type = content["data"]["deeplink"]["actions"][0]["contentType"]
|
||||||
|
|
||||||
|
if _type == "movie":
|
||||||
|
movie = self._movie(self.title)
|
||||||
|
return Movies(movie)
|
||||||
|
|
||||||
|
elif _type == "series":
|
||||||
|
episodes = self._show(self.title)
|
||||||
|
return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
|
||||||
|
resource_id = title.data.get("resourceId")
|
||||||
|
content_id = title.data["partnerFeed"].get("dmcContentId")
|
||||||
|
content = self.get_video(content_id)
|
||||||
|
playback = content["video"]["mediaMetadata"]["playbackUrls"][0]["href"]
|
||||||
|
|
||||||
|
token = self._refresh()
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
"accept": "application/vnd.media-service+json; version=5",
|
||||||
|
"authorization": token,
|
||||||
|
"x-dss-feature-filtering": "true",
|
||||||
|
}
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
"playbackId": resource_id,
|
||||||
|
"playback": {
|
||||||
|
"attributes": {
|
||||||
|
"codecs": {
|
||||||
|
"supportsMultiCodecMaster": False,
|
||||||
|
},
|
||||||
|
"protocol": "HTTPS",
|
||||||
|
# "ads": "",
|
||||||
|
"frameRates": [60],
|
||||||
|
"assetInsertionStrategy": "SGAI",
|
||||||
|
"playbackInitializationContext": "ONLINE",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
video_ranges = []
|
||||||
|
audio_types = []
|
||||||
|
|
||||||
|
audio_types.append("ATMOS")
|
||||||
|
audio_types.append("DTS_X")
|
||||||
|
|
||||||
|
if not self.cdm.security_level == 3 and self.range == "DV":
|
||||||
|
video_ranges.append("DOLBY_VISION")
|
||||||
|
|
||||||
|
if not self.cdm.security_level == 3 and self.range == "HDR10":
|
||||||
|
video_ranges.append("HDR10")
|
||||||
|
|
||||||
|
if self.vcodec == "H265":
|
||||||
|
payload["playback"]["attributes"]["codecs"] = {"video": ["h264", "h265"]}
|
||||||
|
|
||||||
|
if audio_types:
|
||||||
|
payload["playback"]["attributes"]["audioTypes"] = audio_types
|
||||||
|
|
||||||
|
if video_ranges:
|
||||||
|
payload["playback"]["attributes"]["videoRanges"] = video_ranges
|
||||||
|
|
||||||
|
if self.cdm.security_level == 3:
|
||||||
|
payload["playback"]["attributes"]["resolution"] = {"max": ["1280x720"]}
|
||||||
|
|
||||||
|
scenario = "ctr-regular" if self.cdm.security_level == 3 else "ctr-high"
|
||||||
|
endpoint = playback.format(scenario=scenario)
|
||||||
|
|
||||||
|
res = self._request("POST", endpoint, payload=payload, headers=headers)
|
||||||
|
self.playback_data[title.id] = self._request(
|
||||||
|
"POST", f"https://disney.playback.edge.bamgrid.com/v7/playback/{scenario}", payload=payload, headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
manifest = res["stream"]["complete"][0]["url"]
|
||||||
|
|
||||||
|
tracks = HLS.from_url(url=manifest, session=self.session).to_tracks(language="en-US")
|
||||||
|
for audio in tracks.audio:
|
||||||
|
bitrate = re.search(
|
||||||
|
r"(?<=r/composite_)\d+|\d+(?=_complete.m3u8)",
|
||||||
|
as_list(audio.url)[0],
|
||||||
|
)
|
||||||
|
audio.bitrate = int(bitrate.group()) * 1000
|
||||||
|
if audio.bitrate == 1000_000:
|
||||||
|
# DSNP lies about the Atmos bitrate
|
||||||
|
audio.bitrate = 768_000
|
||||||
|
|
||||||
|
return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
|
||||||
|
"""
|
||||||
|
Extract chapter information from the title data if available.
|
||||||
|
Returns chapter markers for intro, credits, and scenes.
|
||||||
|
"""
|
||||||
|
chapters = Chapters()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# First try to get chapters from the new API via playback data
|
||||||
|
if title.id in self.playback_data and "stream" in self.playback_data[title.id]:
|
||||||
|
playback_res = self.playback_data[title.id]
|
||||||
|
|
||||||
|
# Check for editorial markers in playback data
|
||||||
|
if "editorial" in playback_res.get("stream", {}):
|
||||||
|
editorial = playback_res["stream"]["editorial"]
|
||||||
|
|
||||||
|
# Add "Start" chapter if not already present
|
||||||
|
if not any(item.get("offsetMillis") == 0 for item in editorial):
|
||||||
|
chapters.add(Chapter(timestamp=0, name="Start"))
|
||||||
|
|
||||||
|
# Map editorial labels to chapter names
|
||||||
|
mapping = {
|
||||||
|
"recap_start": "Recap",
|
||||||
|
"FFER": "Recap", # First Frame Episode Recap
|
||||||
|
"recap_end": "Scene",
|
||||||
|
"LFER": "Scene", # Last Frame Episode Recap
|
||||||
|
"intro_start": "Title Sequence",
|
||||||
|
"intro_end": "Scene",
|
||||||
|
"FFEI": "Title Sequence", # First Frame Episode Intro
|
||||||
|
"LFEI": "Scene", # Last Frame Episode Intro
|
||||||
|
"FFCB": None, # First Frame Credits Bumper
|
||||||
|
"LFCB": "Scene", # Last Frame Credits Bumper
|
||||||
|
"FFEC": "End Credits", # First Frame End Credits
|
||||||
|
"LFEC": None, # Last Frame End Credits
|
||||||
|
"up_next": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Sort by timestamp to ensure proper scene numbering
|
||||||
|
editorial.sort(key=lambda x: x.get("offsetMillis", 0))
|
||||||
|
|
||||||
|
# Track chapters we've already added by timestamp to avoid duplicates
|
||||||
|
seen_timestamps = set()
|
||||||
|
scene_count = 0
|
||||||
|
|
||||||
|
for marker in editorial:
|
||||||
|
if "label" in marker and "offsetMillis" in marker:
|
||||||
|
timestamp = marker["offsetMillis"]
|
||||||
|
name = mapping.get(marker["label"])
|
||||||
|
|
||||||
|
# Skip if no mapping or already processed timestamp
|
||||||
|
if not name or timestamp in seen_timestamps:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Mark this timestamp as seen
|
||||||
|
seen_timestamps.add(timestamp)
|
||||||
|
|
||||||
|
if name == "Scene":
|
||||||
|
scene_count += 1
|
||||||
|
name = f"Scene {scene_count}"
|
||||||
|
|
||||||
|
chapters.add(Chapter(timestamp=timestamp, name=name))
|
||||||
|
|
||||||
|
# If we found chapters in the playback data, return them
|
||||||
|
if chapters:
|
||||||
|
return chapters
|
||||||
|
|
||||||
|
# If no chapters found in playback data, try the original method
|
||||||
|
content_id = title.data["partnerFeed"].get("dmcContentId")
|
||||||
|
content = self.get_video(content_id)
|
||||||
|
|
||||||
|
# Check for chapter/milestone data
|
||||||
|
video_info = content.get("video", {}).get("milestone", {})
|
||||||
|
|
||||||
|
if not video_info:
|
||||||
|
return chapters
|
||||||
|
|
||||||
|
# Mapping of milestone types to chapter names
|
||||||
|
mapping = {
|
||||||
|
"recap_start": "Recap",
|
||||||
|
"recap_end": "Scene",
|
||||||
|
"intro_start": "Title Sequence",
|
||||||
|
"intro_end": "Scene",
|
||||||
|
"FFEI": "Title Sequence", # First Frame Episode Intro
|
||||||
|
"LFEI": "Scene", # Last Frame Episode Intro
|
||||||
|
"FFCB": None, # First Frame Credits Bumper
|
||||||
|
"LFCB": "Scene", # Last Frame Credits Bumper
|
||||||
|
"FFEC": "End Credits", # First Frame End Credits
|
||||||
|
"LFEC": None, # Last Frame End Credits
|
||||||
|
"up_next": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Flatten the milestone data and sort by start time
|
||||||
|
flattened = []
|
||||||
|
for chapter_type, items in video_info.items():
|
||||||
|
for entry in items:
|
||||||
|
if "milestoneTime" in entry and entry["milestoneTime"]:
|
||||||
|
start = entry["milestoneTime"][0]["startMillis"]
|
||||||
|
flattened.append({"type": chapter_type, "start": start})
|
||||||
|
|
||||||
|
flattened.sort(key=lambda x: x["start"])
|
||||||
|
|
||||||
|
# Create chapters
|
||||||
|
chapter_list = []
|
||||||
|
scene_count = 0
|
||||||
|
for f in flattened:
|
||||||
|
name = mapping.get(f["type"])
|
||||||
|
if not name:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if name == "Scene":
|
||||||
|
scene_count += 1
|
||||||
|
name = f"Scene {scene_count}"
|
||||||
|
|
||||||
|
chapter_list.append(Chapter(timestamp=f["start"], name=name))
|
||||||
|
|
||||||
|
# Add a "Start" chapter at 0 if we have end credits
|
||||||
|
if "FFEC" in video_info and not any(ch.timestamp == 0 for ch in chapter_list):
|
||||||
|
chapter_list.insert(0, Chapter(timestamp=0, name="Start"))
|
||||||
|
|
||||||
|
# Remove duplicates (same time and name)
|
||||||
|
prev_time, prev_name = None, None
|
||||||
|
|
||||||
|
for ch in chapter_list:
|
||||||
|
# Convert timestamp to milliseconds for comparison
|
||||||
|
if isinstance(ch.timestamp, str):
|
||||||
|
ts_parts = ch.timestamp.split(":")
|
||||||
|
hour, minute, second = int(ts_parts[0]), int(ts_parts[1]), float(ts_parts[2])
|
||||||
|
ts_ms = (hour * 3600 + minute * 60 + second) * 1000
|
||||||
|
else:
|
||||||
|
ts_ms = ch.timestamp
|
||||||
|
|
||||||
|
if prev_time is None or (ts_ms != prev_time and ch.name != prev_name):
|
||||||
|
chapters.add(ch)
|
||||||
|
prev_time, prev_name = ts_ms, ch.name
|
||||||
|
|
||||||
|
return chapters
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.log.warning(f"Failed to extract chapters: {e}")
|
||||||
|
return chapters
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, *, challenge: bytes, title, track) -> None:
|
||||||
|
headers = {
|
||||||
|
"Authorization": f"Bearer {self._cache.data['token']['accessToken']}",
|
||||||
|
"Content-Type": "application/octet-stream",
|
||||||
|
}
|
||||||
|
r = self.session.post(url=self.config["LICENSE"], headers=headers, data=challenge)
|
||||||
|
if r.status_code != 200:
|
||||||
|
raise ConnectionError(r.text)
|
||||||
|
return r.content
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
def _show(self, title: str) -> Episode:
|
||||||
|
page = self.get_page(title)
|
||||||
|
container = next(x for x in page["containers"] if x.get("type") == "episodes")
|
||||||
|
season_ids = [x.get("id") for x in container["seasons"] if x.get("type") == "season"]
|
||||||
|
|
||||||
|
episodes = []
|
||||||
|
for season in season_ids:
|
||||||
|
endpoint = self.href(
|
||||||
|
self.prd_config["services"]["explore"]["client"]["endpoints"]["getSeason"]["href"],
|
||||||
|
version=self.config["EXPLORE_VERSION"],
|
||||||
|
seasonId=season,
|
||||||
|
)
|
||||||
|
data = self.session.get(endpoint).json()["data"]["season"]["items"]
|
||||||
|
episodes.extend(data)
|
||||||
|
|
||||||
|
return [
|
||||||
|
Episode(
|
||||||
|
id_=episode.get("id"),
|
||||||
|
service=self.__class__,
|
||||||
|
title=episode["visuals"].get("title"),
|
||||||
|
year=episode["visuals"]["metastringParts"].get("releaseYearRange", {}).get("startYear"),
|
||||||
|
season=int(episode["visuals"].get("seasonNumber", 0)),
|
||||||
|
number=int(episode["visuals"].get("episodeNumber", 0)),
|
||||||
|
name=episode["visuals"].get("episodeTitle"),
|
||||||
|
data=next(x for x in episode["actions"] if x.get("type") == "playback"),
|
||||||
|
)
|
||||||
|
for episode in episodes
|
||||||
|
if episode.get("type") == "view"
|
||||||
|
]
|
||||||
|
|
||||||
|
def _movie(self, title: str) -> Movie:
|
||||||
|
movie = self.get_page(title)
|
||||||
|
|
||||||
|
return [
|
||||||
|
Movie(
|
||||||
|
id_=movie.get("id"),
|
||||||
|
service=self.__class__,
|
||||||
|
name=movie["visuals"].get("title"),
|
||||||
|
year=movie["visuals"]["metastringParts"].get("releaseYearRange", {}).get("startYear"),
|
||||||
|
data=next(x for x in movie["actions"] if x.get("type") == "playback"),
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
def _request(
|
||||||
|
self,
|
||||||
|
method: str,
|
||||||
|
endpoint: str,
|
||||||
|
params: dict = None,
|
||||||
|
headers: dict = None,
|
||||||
|
payload: dict = None,
|
||||||
|
) -> Any[dict | str]:
|
||||||
|
_headers = headers if headers else self.session.headers
|
||||||
|
|
||||||
|
prep = self.session.prepare_request(Request(method, endpoint, headers=_headers, params=params, json=payload))
|
||||||
|
response = self.session.send(prep)
|
||||||
|
|
||||||
|
try:
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
if data.get("errors"):
|
||||||
|
code = data["errors"][0]["extensions"].get("code")
|
||||||
|
|
||||||
|
if "token.service.unauthorized.client" in code:
|
||||||
|
raise ConnectionError("Unauthorized Client/IP: " + code)
|
||||||
|
if "idp.error.identity.bad-credentials" in code:
|
||||||
|
raise ConnectionError("Bad Credentials: " + code)
|
||||||
|
else:
|
||||||
|
raise ConnectionError(data["errors"])
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
raise ConnectionError("Request failed: {}".format(response.content))
|
||||||
|
|
||||||
|
def get_page(self, title):
|
||||||
|
params = {
|
||||||
|
"disableSmartFocus": "true",
|
||||||
|
"limit": 999,
|
||||||
|
"enhancedContainersLimit": 0,
|
||||||
|
}
|
||||||
|
endpoint = self.href(
|
||||||
|
self.prd_config["services"]["explore"]["client"]["endpoints"]["getPage"]["href"],
|
||||||
|
version=self.config["EXPLORE_VERSION"],
|
||||||
|
pageId=title,
|
||||||
|
)
|
||||||
|
return self._request("GET", endpoint, params=params)["data"]["page"]
|
||||||
|
|
||||||
|
def get_video(self, content_id: str) -> dict:
|
||||||
|
endpoint = self.href(
|
||||||
|
self.prd_config["services"]["content"]["client"]["endpoints"]["getDmcVideo"]["href"], contentId=content_id
|
||||||
|
)
|
||||||
|
return self._request("GET", endpoint)["data"]["DmcVideo"]
|
||||||
|
|
||||||
|
def get_deeplink(self, ref_id: str) -> str:
|
||||||
|
params = {
|
||||||
|
"refId": ref_id,
|
||||||
|
"refIdType": "deeplinkId",
|
||||||
|
}
|
||||||
|
endpoint = "https://disney.content.edge.bamgrid.com/explore/v1.0/deeplink"
|
||||||
|
return self._request("GET", endpoint, params=params)
|
||||||
|
|
||||||
|
def series_bundle(self, series_id: str) -> dict:
|
||||||
|
endpoint = self.href(
|
||||||
|
self.prd_config["services"]["content"]["client"]["endpoints"]["getDmcSeriesBundle"]["href"],
|
||||||
|
encodedSeriesId=series_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
return self.session.get(endpoint).json()["data"]["DmcSeriesBundle"]
|
||||||
|
|
||||||
|
def refresh_token(self, refresh_token: str):
|
||||||
|
payload = {
|
||||||
|
"operationName": "refreshToken",
|
||||||
|
"variables": {
|
||||||
|
"input": {
|
||||||
|
"refreshToken": refresh_token,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"query": queries.REFRESH_TOKEN,
|
||||||
|
}
|
||||||
|
|
||||||
|
endpoint = self.prd_config["services"]["orchestration"]["client"]["endpoints"]["refreshToken"]["href"]
|
||||||
|
data = self._request("POST", endpoint, payload=payload, headers={"authorization": self.config["API_KEY"]})
|
||||||
|
return data["extensions"]["sdk"]
|
||||||
|
|
||||||
|
def _refresh(self):
|
||||||
|
if not self._cache.expired:
|
||||||
|
return self._cache.data["token"]["accessToken"]
|
||||||
|
|
||||||
|
profile = self.refresh_token(self._cache.data["token"]["refreshToken"])
|
||||||
|
self._cache.set(profile, expiration=profile["token"]["expiresIn"] - 30)
|
||||||
|
return self._cache.data["token"]["accessToken"]
|
||||||
|
|
||||||
|
def register_device(self) -> dict:
|
||||||
|
payload = {
|
||||||
|
"variables": {
|
||||||
|
"registerDevice": {
|
||||||
|
"applicationRuntime": "android",
|
||||||
|
"attributes": {
|
||||||
|
"operatingSystem": "Android",
|
||||||
|
"operatingSystemVersion": "8.1.0",
|
||||||
|
},
|
||||||
|
"deviceFamily": "android",
|
||||||
|
"deviceLanguage": "en",
|
||||||
|
"deviceProfile": "tv",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"query": queries.REGISTER_DEVICE,
|
||||||
|
}
|
||||||
|
|
||||||
|
endpoint = self.prd_config["services"]["orchestration"]["client"]["endpoints"]["registerDevice"]["href"]
|
||||||
|
data = self._request("POST", endpoint, payload=payload, headers={"authorization": self.config["API_KEY"]})
|
||||||
|
return data["extensions"]["sdk"]["token"]["accessToken"]
|
||||||
|
|
||||||
|
def login(self, email: str, password: str, token: str) -> dict:
|
||||||
|
payload = {
|
||||||
|
"operationName": "loginTv",
|
||||||
|
"variables": {
|
||||||
|
"input": {
|
||||||
|
"email": email,
|
||||||
|
"password": password,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"query": queries.LOGIN,
|
||||||
|
}
|
||||||
|
|
||||||
|
endpoint = self.prd_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
|
||||||
|
data = self._request("POST", endpoint, payload=payload, headers={"authorization": token})
|
||||||
|
return data["extensions"]["sdk"]["token"]
|
||||||
|
|
||||||
|
def href(self, href, **kwargs) -> str:
|
||||||
|
_args = {
|
||||||
|
"apiVersion": "{apiVersion}",
|
||||||
|
"region": self.active_session["location"]["countryCode"],
|
||||||
|
"impliedMaturityRating": 1850,
|
||||||
|
"kidsModeEnabled": "false",
|
||||||
|
"appLanguage": "en-US",
|
||||||
|
"partner": "disney",
|
||||||
|
}
|
||||||
|
_args.update(**kwargs)
|
||||||
|
|
||||||
|
href = href.format(**_args)
|
||||||
|
|
||||||
|
# [3.0, 3.1, 3.2, 5.0, 3.3, 5.1, 6.0, 5.2, 6.1]
|
||||||
|
api_version = "6.1"
|
||||||
|
if "/search/" in href:
|
||||||
|
api_version = "5.1"
|
||||||
|
|
||||||
|
return href.format(apiVersion=api_version)
|
||||||
|
|
||||||
|
def check_email(self, email: str, token: str) -> str:
|
||||||
|
payload = {
|
||||||
|
"operationName": "Check",
|
||||||
|
"variables": {
|
||||||
|
"email": email,
|
||||||
|
},
|
||||||
|
"query": queries.CHECK_EMAIL,
|
||||||
|
}
|
||||||
|
|
||||||
|
endpoint = self.prd_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
|
||||||
|
data = self._request("POST", endpoint, payload=payload, headers={"authorization": token})
|
||||||
|
return data["data"]["check"]["operations"][0]
|
||||||
|
|
||||||
|
def account(self) -> dict:
|
||||||
|
endpoint = self.prd_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
"operationName": "EntitledGraphMeQuery",
|
||||||
|
"variables": {},
|
||||||
|
"query": queries.ENTITLEMENTS,
|
||||||
|
}
|
||||||
|
|
||||||
|
data = self._request("POST", endpoint, payload=payload)
|
||||||
|
return data["data"]["me"]
|
||||||
|
|
||||||
|
def switch_profile(self, profile_id: str) -> dict:
|
||||||
|
payload = {
|
||||||
|
"operationName": "switchProfile",
|
||||||
|
"variables": {
|
||||||
|
"input": {
|
||||||
|
"profileId": profile_id,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"query": queries.SWITCH_PROFILE,
|
||||||
|
}
|
||||||
|
|
||||||
|
endpoint = self.prd_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
|
||||||
|
data = self._request("POST", endpoint, payload=payload)
|
||||||
|
return data["extensions"]["sdk"]
|
||||||
22
services/DSNP/config.yaml
Normal file
22
services/DSNP/config.yaml
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
CLIENT_ID: disney-svod-3d9324fc
|
||||||
|
CLIENT_VERSION: "9.10.0"
|
||||||
|
|
||||||
|
API_KEY: "ZGlzbmV5JmFuZHJvaWQmMS4wLjA.bkeb0m230uUhv8qrAXuNu39tbE_mD5EEhM_NAcohjyA"
|
||||||
|
CONFIG_URL: https://bam-sdk-configs.bamgrid.com/bam-sdk/v5.0/disney-svod-3d9324fc/android/v9.10.0/google/tv/prod.json
|
||||||
|
|
||||||
|
PAGE_SIZE_SETS: 15
|
||||||
|
PAGE_SIZE_CONTENT: 30
|
||||||
|
SEARCH_QUERY_TYPE: ge
|
||||||
|
BAM_PARTNER: disney
|
||||||
|
EXPLORE_VERSION: v1.3
|
||||||
|
|
||||||
|
HEADERS:
|
||||||
|
User-Agent: BAMSDK/v9.10.0 (disney-svod-3d9324fc 2.26.2-rc1.0; v5.0/v9.10.0; android; tv)
|
||||||
|
x-application-version: google
|
||||||
|
x-bamsdk-platform-id: android-tv
|
||||||
|
x-bamsdk-client-id: disney-svod-3d9324fc
|
||||||
|
x-bamsdk-platform: android-tv
|
||||||
|
x-bamsdk-version: "9.10.0"
|
||||||
|
Accept-Encoding: gzip
|
||||||
|
|
||||||
|
LICENSE: https://disney.playback.edge.bamgrid.com/widevine/v1/obtain-license
|
||||||
13
services/DSNP/queries.py
Normal file
13
services/DSNP/queries.py
Normal file
File diff suppressed because one or more lines are too long
139
services/HM/__init__.py
Normal file
139
services/HM/__init__.py
Normal file
@ -0,0 +1,139 @@
|
|||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Optional, Union
|
||||||
|
import click
|
||||||
|
import uuid
|
||||||
|
import requests
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Movies, Movie, Titles_T, Title_T, Series, Episode
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.tracks import Chapters, Tracks
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
|
||||||
|
class HM(Service):
    """
    Service code for Hallmark Now

    Written by TPD94

    Authorization: Cookies

    Security: FHD@L3
    """

    GEOFENCE = ('US',)

    @staticmethod
    @click.command(name="HM", short_help="https://www.hmnow.com/", help=__doc__)
    @click.argument("title", type=str)
    @click.option("--movie", is_flag=True, help="Specify if it's a movie")
    @click.pass_context
    def cli(ctx, **kwargs):
        """Click entry point; builds the HM service from CLI arguments."""
        return HM(ctx, **kwargs)

    def __init__(self, ctx, title, movie):
        """Store the catalog/collection id and movie flag before base init."""
        self.title = title
        self.movie = movie
        super().__init__(ctx)

    def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
        """Cookie-based authentication; credentials are not used by this service.

        Fix: fail fast with a clear error instead of crashing on update(None)
        when no cookie jar is supplied.
        """
        if not cookies:
            raise EnvironmentError("Service requires Cookies for Authentication.")
        self.session.cookies.update(cookies)

    @staticmethod
    def _api_headers() -> dict:
        """Browser-impersonation headers (previously duplicated in get_titles/get_tracks)."""
        return {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
            'Accept': 'application/json, text/plain, */*',
            'Accept-Language': 'en-US,en;q=0.5',
            # NOTE(review): this JSON blob appears to be missing its final closing
            # brace; kept byte-identical since the API accepts it as-is — confirm
            # before "fixing".
            'X-Device-Info': f'{{"id":"{uuid.uuid4()}","hardware":{{"manufacturer":"Browser (Desktop)","model":"Firefox","version":"124.0"}},"os":{{"name":"Windows","version":"10"}},"display":{{"width":1920,"height":1080}},"legal":{{}}',
        }

    def get_titles(self) -> Titles_T:
        """Return the movie, or every episode of the series, identified by self.title."""
        headers = self._api_headers()

        if self.movie:
            movie_metadata = self.session.get(
                url=f'https://www.hmnow.com/api/core/catalog/item/{self.title}', headers=headers
            ).json()
            return Movies([Movie(
                id_=movie_metadata['data']['id'],
                service=self.__class__,
                name=movie_metadata['data']['title'],
                year=movie_metadata['data']['releaseDate'][:4],
                language="en",
                data={'playbackUrl': movie_metadata['data']['video']['playback']}
            )])

        series_metadata = self.session.get(
            url=f'https://www.hmnow.com/api/core/catalog/collection/{self.title}', headers=headers
        ).json()
        # Each entry of the series collection is one season (itself a collection).
        season_ids = [season['id'] for season in series_metadata['data']]

        episodes = []
        for season in season_ids:
            season_metadata = self.session.get(
                url=f'https://www.hmnow.com/api/core/catalog/collection/{season}', headers=headers
            ).json()
            for episode in season_metadata['data']:
                episodes.append(Episode(
                    id_=episode['id'],
                    service=self.__class__,
                    title=season_metadata['collection']['tvShow']['title'],
                    season=episode['tvShowSeason']['season']['number'],
                    number=episode['episode']['number'],
                    name=episode['title'],
                    year=episode['releaseDate'][:4],
                    language="en",
                    data={'playbackUrl': episode['video']['playback']}
                ))

        return Series(episodes)

    def get_tracks(self, title: Title_T) -> Tracks:
        """Fetch playback info, stash DRM details on the title, and parse the DASH tracks."""
        episode_metadata = self.session.get(url=title.data['playbackUrl'], headers=self._api_headers()).json()

        # NOTE(review): index [1] selects a specific stream flavour — confirm it is
        # always present and always the DASH/Widevine variant.
        stream = episode_metadata['playbackInfo']['videoStreams'][1]
        tracks = DASH.from_url(stream['url']).to_tracks("en")

        # Stored for get_widevine_license.
        title.data['licenseUrl'] = stream['drms'][0]['licenseUrl']
        title.data['token'] = stream['drms'][0]['licenseTokenHeader']['value']

        return tracks

    def get_chapters(self, title: Title_T) -> Chapters:
        """Hallmark Now exposes no chapter data."""
        return []

    def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
        """POST the challenge to the Verimatrix license server and return the license bytes."""
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Authorization': f'{title.data["token"]}',
        }
        return requests.post(
            url='https://multidrm.vsaas.verimatrixcloud.net/widevine', headers=headers, data=challenge
        ).content
|
||||||
356
services/HULU/__init__.py
Normal file
356
services/HULU/__init__.py
Normal file
@ -0,0 +1,356 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from click import Context
|
||||||
|
import hashlib
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
from langcodes import Language
|
||||||
|
from typing import Any, Union, Optional
|
||||||
|
from devine.core.utils.pyhulu import Device, HuluClient
|
||||||
|
from pywidevine.cdm import Cdm as WidevineCdm
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Movies, Movie, Titles_T, Title_T, Series, Episode
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.tracks import Chapters, Tracks, Subtitle, Chapter
|
||||||
|
from devine.core.manifests import HLS, DASH
|
||||||
|
from devine.core.tracks import Audio, Chapter, Subtitle, Tracks, Video
|
||||||
|
|
||||||
|
class HULU(Service):
|
||||||
|
"""
|
||||||
|
Service code for the Hulu streaming service (https://hulu.com).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Cookies
|
||||||
|
Security: UHD@L3
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ["HULU"]
|
||||||
|
|
||||||
|
TITLE_RE = (
|
||||||
|
r"^(?:https?://(?:www\.)?hulu\.com/(?P<type>movie|series)/)?(?:[a-z0-9-]+-)?"
|
||||||
|
r"(?P<id>[a-f0-9]{8}(?:-[a-f0-9]{4}){3}-[a-f0-9]{12})"
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@click.command(name="HULU", short_help="hulu.com")
|
||||||
|
@click.argument("title", type=str)
|
||||||
|
@click.option(
|
||||||
|
"-m", "--movie", is_flag=True, default=False, help="Title is a Movie."
|
||||||
|
)
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx, **kwargs):
|
||||||
|
return HULU(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title, movie: bool):
    """Store CLI arguments and pull global options off the parent context."""
    self.url = title
    self.title = title
    self.movie = movie
    super().__init__(ctx)

    # global devine options (parent click context)
    self.vcodec = ctx.parent.params.get("vcodec")
    self.range = ctx.parent.params.get("range_")

    # populated later, during authenticate()
    self.device: Device
    self.playback_params: dict = {}
    self.hulu_h264_client: HuluClient
    self.license_url: str
|
def get_titles(self):
    """
    Fetch title metadata for self.title from the Hulu discover API.

    Returns:
        Movies with a single Movie when --movie was given, otherwise a
        Series of all Episodes across every season.

    Exits via self.log.exit on authentication or lookup errors.
    """
    if self.movie:
        res = self.session.get(
            self.config["endpoints"]["movie"].format(id=self.title)
        ).json()
        if res.get("message") == "Unable to authenticate user":
            self.log.exit(
                " x Unable to authenticate user, are you sure the credentials are correct?"
            )
        title_data = res["details"]["vod_items"]["focus"]["entity"]
        return Movies([Movie(
            id_=self.title,
            service=self.__class__,
            name=title_data["name"],
            # premiere_date is an ISO-style date string; first 4 chars are the year
            year=int(title_data["premiere_date"][:4]),
            language="en",
            data=title_data,
        )])

    res = self.session.get(
        self.config["endpoints"]["series"].format(id=self.title)
    ).json()
    if res.get("code", 200) != 200:
        # Hulu content ids are the trailing 36-char UUID of the slug
        content_id = "-".join(self.title.split("-")[-5:])
        if "Invalid uuid for param 'entity_id'" in res["message"]:
            if len(content_id) != 36:
                missing_chars = 36 - len(content_id)
                self.log.exit(
                    f"Content id '{content_id}' should have 36 characters.\n"
                    f"You're missing {missing_chars} character(s). "
                    f"Please make sure you provide the complete link."
                )
            else:
                self.log.exit(
                    f"We were unable to retrieve this title from HULU...\n"
                    f"Are you sure '{content_id}' is the right content id?"
                )
        self.log.exit(
            f"Failed to get titles for {self.title}: {res['message']} [{res['code']}]"
        )

    season_data = next(
        (x for x in res["components"] if x["name"] == "Episodes"), None
    )
    if not season_data:
        self.log.exit(
            f"We were unable to retrieve the episodes of '{res['name']}'\n"
            f"It's most likely you need a "
            f"'{res['details']['entity']['primary_branding']['name']}' add-on at HULU"
        )

    episode_count = sum(
        season["pagination"]["total_count"] for season in season_data["items"]
    )
    self.total_titles = (len(season_data["items"]), episode_count)

    # keep each episode paired with the season it was fetched for; the
    # original leaked the `season` loop variable, so every episode's id
    # was built from the *last* season
    episodes: list[tuple[str, dict]] = []
    for season in season_data["items"]:
        season_id = season["id"]
        items = self.session.get(
            self.config["endpoints"]["season"].format(
                id=self.title, season=season_id.rsplit("::", 1)[1]
            )
        ).json()["items"]
        episodes.extend((season_id, episode) for episode in items)

    # resolve the original audio language from the first episode's playlist
    # (previously computed but never used)
    original_language = self.hulu_h264_client.load_playlist(
        episodes[0][1]["bundle"]["eab_id"]
    )["video_metadata"]["language"]

    titles = Series()
    for season_id, episode in episodes:
        titles.add(Episode(
            id_=f"{season_id}::{episode['season']}::{episode['number']}",
            service=self.__class__,
            title=episode["series_name"],
            season=int(episode["season"]),
            number=int(episode["number"]),
            name=episode["name"],
            language=original_language or "en",
            data=episode,
        ))
    return titles
|
||||||
|
def get_tracks(self, title, HDR_available=False):
    """
    Load the playback manifest for `title` and return its Tracks.

    Client selection preserves the original net behavior: the H.264 client
    is used for (H.264 + SDR) and for (H.265 + non-SDR) — the latter was
    previously a second fetch that silently overwrote the H.265 tracks —
    and the H.265 client otherwise. Also records the Widevine license URL
    on self.license_url for get_widevine_license().
    """
    use_h264 = (
        (self.vcodec == "H.264" and self.range[0].name == "SDR")
        or (self.vcodec == "H.265" and self.range[0].name != "SDR")
    )
    client = self.hulu_h264_client if use_h264 else self.hulu_h265_client

    playlist = client.load_playlist(title.data["bundle"]["eab_id"])
    self.license_url = playlist["wv_server"]
    tracks = DASH.from_url(playlist["stream_url"], self.session).to_tracks(title.language)

    for sub_lang, sub_url in playlist["transcripts_urls"]["webvtt"].items():
        tracks.add(Subtitle(
            # short stable id derived from the subtitle URL
            id_=hashlib.md5(sub_url.encode()).hexdigest()[0:6],
            url=sub_url,
            codec=Subtitle.Codec.from_mime('vtt'),
            language=sub_lang,
            forced=False,  # TODO: find out if sub is forced
            sdh=False  # TODO: find out if sub is SDH/CC, it's actually quite likely to be true
        ))

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
    """Return the common Widevine privacy certificate (no service-specific cert)."""
    return WidevineCdm.common_privacy_cert
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge, track, **_):
    """POST the raw license challenge to the URL saved by get_tracks()."""
    response = self.session.post(url=self.license_url, data=challenge)  # expects bytes
    return response.content
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """
    Prepare the session and create the H.264/H.265 Hulu playback clients.

    Cookies are required; credentials are not used by this service.

    Raises:
        EnvironmentError: if no cookies were provided (previously this
            crashed with a TypeError from cookies.update(None)).
    """
    if not cookies:
        raise EnvironmentError("Service requires Cookies for Authentication.")
    self.session.cookies.update(cookies)

    self.device = Device(
        device_code=self.config["device"]["FireTV4K"]["code"],
        device_key=self.config["device"]["FireTV4K"]["key"],
    )
    self.session.headers.update(
        {
            "User-Agent": self.config["user_agent"],
        }
    )

    # the two payloads only differ in video codec, dynamic range and
    # multi-key DRM; build both from one helper instead of two ~60-line
    # duplicated literals
    self.h264_playback_params = self._build_playback_params("H264")
    self.h265_playback_params = self._build_playback_params("H265")

    self.hulu_h264_client = HuluClient(
        device=self.device,
        session=self.session,
        version=self.config["device"].get("device_version"),
        **self.h264_playback_params,
    )
    self.hulu_h265_client = HuluClient(
        device=self.device,
        session=self.session,
        version=self.config["device"].get("device_version"),
        **self.h265_playback_params,
    )

def _build_playback_params(self, video_codec: str) -> dict:
    """
    Build the Hulu playback-params payload for `video_codec` ("H264"/"H265").

    H265 additionally requests HDR10+ dynamic range and multi-key DRM,
    matching the previously duplicated literal dicts byte-for-byte in value.
    """
    video: dict = {}
    if video_codec == "H265":
        video["dynamic_range"] = "HDR10PLUS"
    video["codecs"] = {
        "values": [
            x
            for x in self.config["codecs"]["video"]
            if x["type"] == video_codec
        ],
        "selection_mode": self.config["codecs"]["video_selection"],
    }

    drm: dict = {}
    if video_codec == "H265":
        drm["multi_key"] = True
    drm.update(
        {
            "values": self.config["drm"]["schemas"],
            "selection_mode": self.config["drm"]["selection_mode"],
            "hdcp": self.config["drm"]["hdcp"],
        }
    )

    return {
        "all_cdn": False,
        "region": "US",
        "language": "en",
        "interface_version": "1.9.0",
        "network_mode": "wifi",
        "play_intent": "resume",
        "playback": {
            "version": 2,
            "video": video,
            "audio": {
                "codecs": {
                    "values": self.config["codecs"]["audio"],
                    "selection_mode": self.config["codecs"]["audio_selection"],
                }
            },
            "drm": drm,
            "manifest": {
                "type": "DASH",
                "https": True,
                "multiple_cdns": False,
                "patch_updates": True,
                "hulu_types": True,
                "live_dai": True,
                "secondary_audio": True,
                "live_fragment_delay": 3,
            },
            "segments": {
                "values": [
                    {
                        "type": "FMP4",
                        "encryption": {"mode": "CENC", "type": "CENC"},
                        "https": True,
                    }
                ],
                "selection_mode": "ONE",
            },
        },
    }
|
||||||
54
services/HULU/config.yaml
Normal file
54
services/HULU/config.yaml
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
user_agent: Mozilla/5.0 (Fire OS 6.2.7.6; Amazon AFTMM; armeabi-v7a) AppleWebKit/604.1.38
|
||||||
|
(KHTML, like Gecko) Neutron/1.3.37 Hulu/0 TV Safari/604.1.38
|
||||||
|
endpoints:
|
||||||
|
movie: https://discover.hulu.com/content/v5/hubs/movie/{id}?limit=999&schema=9&referralHost=production
|
||||||
|
series: https://discover.hulu.com/content/v5/hubs/series/{id}?limit=999&schema=9&referralHost=production
|
||||||
|
season: https://discover.hulu.com/content/v5/hubs/series/{id}/season/{season}?limit=999&schema=9&referralHost=production
|
||||||
|
device:
|
||||||
|
PC:
|
||||||
|
code: '159'
|
||||||
|
key: 6ebfc84f3c304217b20fd9a66cb5957f
|
||||||
|
Chrome:
|
||||||
|
code: '190'
|
||||||
|
key: 6ebfc84f3c304217b20fd9a66cb5957f
|
||||||
|
FireTV:
|
||||||
|
code: '188'
|
||||||
|
key: ca8d311a734854871623d906b968a073
|
||||||
|
FireTV4K:
|
||||||
|
code: '208'
|
||||||
|
key: fa49ca06261fe41b6e56fa2d24b4f295
|
||||||
|
Shield:
|
||||||
|
code: '109'
|
||||||
|
key: d0f4adc1d8a774256acb00c0fff46f5f
|
||||||
|
Shield2:
|
||||||
|
code: '142'
|
||||||
|
key: d6bdf1f49c73db36f465536162ccc830
|
||||||
|
codecs:
|
||||||
|
video_selection: ONE
|
||||||
|
audio_selection: ALL
|
||||||
|
video:
|
||||||
|
- type: H265
|
||||||
|
profile: MAIN_10
|
||||||
|
width: 3840
|
||||||
|
height: 2160
|
||||||
|
framerate: 60
|
||||||
|
level: '5.1'
|
||||||
|
tier: MAIN
|
||||||
|
- type: H264
|
||||||
|
profile: HIGH
|
||||||
|
width: 1920
|
||||||
|
height: 1080
|
||||||
|
framerate: 60
|
||||||
|
level: '5.2'
|
||||||
|
audio:
|
||||||
|
- type: AAC
|
||||||
|
- type: EC3
|
||||||
|
drm:
|
||||||
|
selection_mode: ONE
|
||||||
|
hdcp: true
|
||||||
|
schemas:
|
||||||
|
- type: WIDEVINE
|
||||||
|
version: MODULAR
|
||||||
|
security_level: L1
|
||||||
|
|
||||||
|
key_policy: ALL
|
||||||
377
services/Hotstar/__init__.py
Normal file
377
services/Hotstar/__init__.py
Normal file
@ -0,0 +1,377 @@
|
|||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
import re
|
||||||
|
import requests
|
||||||
|
from datetime import datetime
|
||||||
|
from urllib.parse import urlparse, parse_qs
|
||||||
|
from requests.adapters import HTTPAdapter
|
||||||
|
from urllib.request import urlopen, Request
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from urllib3.util.retry import Retry
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from click import Context
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
|
from devine.core.utils.collections import as_list
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.manifests import DASH, HLS
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class Hotstar(Service):
|
||||||
|
"""
|
||||||
|
Service code for Star India's Hotstar (aka Disney+ Hotstar) streaming service (https://hotstar.com).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Credentials
|
||||||
|
Security: UHD@L3, doesn't seem to care about releases.
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips: - The library of contents can be viewed without logging in at https://hotstar.com
|
||||||
|
- The homepage hosts domestic programming; Disney+ content is at https://hotstar.com/in/disneyplus
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ["HS", "hotstar"]
|
||||||
|
TITLE_RE = r"^(?:https?://(?:www\.)?hotstar\.com/[a-z0-9/-]+/)(?P<id>\d+)"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@click.command(name="Hotstar", short_help="https://hotstar.com")
|
||||||
|
@click.argument("title", type=str, required=False)
|
||||||
|
@click.option("-m", "--movie", is_flag=True, default=False, help="Title is a movie.")
|
||||||
|
@click.option("-q", "--quality", default="fhd",
|
||||||
|
type=click.Choice(["4k", "fhd", "hd", "sd"], case_sensitive=False),
|
||||||
|
help="Manifest quality to request.")
|
||||||
|
@click.option("-ac", "--audio-codec", default="dolby51",
|
||||||
|
type=click.Choice(["dolby51", "stereo", "atmos"], case_sensitive=False),
|
||||||
|
help="Audio Codec")
|
||||||
|
@click.option("-rg", "--region", default="in", type=click.Choice(["in", "id", "th"], case_sensitive=False),
|
||||||
|
help="Account region")
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx: click.Context, **kwargs: Any) -> 'Hotstar':
|
||||||
|
return Hotstar(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title, movie, quality, audio_codec, region):
    """Store CLI arguments, read global options, then configure the session."""
    super().__init__(ctx)
    self.parse_title(ctx, title)
    self.movie = movie
    self.quality = quality
    self.audio_codec = audio_codec
    self.region = region.lower()

    assert ctx.parent is not None

    # global devine options
    self.vcodec = ctx.parent.params["vcodec"]
    self.acodec = ctx.parent.params["acodec"] or "EC3"
    self.range = ctx.parent.params["range_"]

    self.profile = ctx.obj.profile

    # filled in by configure()
    self.device_id = None
    self.hotstar_auth = None
    self.token = None
    self.license_api = None

    self.configure()
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """
    Fetch title metadata for self.title from the Hotstar API.

    Returns:
        Movies with a single Movie for MOVIE assets, otherwise a Series
        containing every Episode of the show.

    Raises:
        ValueError: if either API response is not valid JSON.
    """
    headers = {
        "Accept": "*/*",
        "Accept-Language": "en-GB,en;q=0.5",
        "hotstarauth": self.hotstar_auth,
        "X-HS-UserToken": self.token,
        "X-HS-Platform": self.config["device"]["platform"]["name"],
        "X-HS-AppVersion": self.config["device"]["platform"]["version"],
        "X-Country-Code": "in",
        "x-platform-code": "PCTV"
    }
    r = self.session.get(
        url=self.config["endpoints"]["movie_title"] if self.movie else self.config["endpoints"]["tv_title"],
        headers=headers,
        params={"contentId": self.title}
    )
    try:
        res = r.json()["body"]["results"]["item"]
    except json.JSONDecodeError:
        # was f"{res.text}", which referenced the not-yet-bound name `res`
        raise ValueError(f"Failed to load title manifest: {r.text}")

    self.content_type = res["assetType"]
    self.lang = res["langObjs"][0]["iso3code"]

    if self.content_type == "MOVIE":
        return Movies([Movie(
            id_=res.get("contentId"),
            service=self.__class__,
            name=res["title"],
            year=res["year"],
            language=self.lang,
            data=res,
        )])

    show_data = res
    r = self.session.get(
        url=self.config["endpoints"]["tv_episodes"],
        headers=headers,
        params={
            "eid": res["id"],
            "etid": "2",  # entity type: episodes
            "tao": "0",   # offset
            "tas": "1000"  # page size
        }
    )
    try:
        # previously the fetched items were discarded and an always-empty
        # `episodes` list was iterated instead
        episodes = r.json()["body"]["results"]["assets"]["items"]
    except json.JSONDecodeError:
        raise ValueError(f"Failed to load episodes list: {r.text}")

    # one Series of all episodes (was: a list of single-episode Series)
    return Series([Episode(
        id_=ep.get("contentId"),
        service=self.__class__,
        title=ep.get("showShortTitle") or show_data["title"],
        year=ep.get("year"),
        season=ep.get("seasonNo"),
        number=ep.get("episodeNo"),
        name=ep.get("title"),
        language=ep.get("langObjs", [{}])[0].get("iso3code", self.lang),
        data=ep
    ) for ep in episodes])
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """
    Request the playback manifest for `title` and return its DASH tracks.

    Records the Widevine license URL on self.license_api when the playback
    set is DRM-protected, and marks every track as needing a proxy.

    Raises:
        ValueError: if the wanted playback set is unavailable.
    """
    # map devine's range selection onto Hotstar's dynamic_range capability
    # (`range` renamed: it shadowed the builtin)
    if self.range == 'HDR10':
        dynamic_range = 'hdr10'
    elif self.range == 'SDR':
        dynamic_range = 'sdr'
    elif self.range == 'DV':
        dynamic_range = 'dv'
        self.vcodec = 'dvh265'
    else:
        dynamic_range = 'sdr'
        self.vcodec = 'h264'

    # compact separators keep the payload identical to the previous
    # hand-concatenated JSON strings
    client_capabilities = json.dumps({
        "package": ["dash", "hls"],
        "container": ["fmp4br"],
        "ads": ["non_ssai", "ssai"],
        "audio_channel": [self.audio_codec],
        "encryption": ["plain", "widevine"],
        "video_codec": [self.vcodec],
        "ladder": ["tv"],
        "resolution": [self.quality],
        "true_resolution": [self.quality],
        "dynamic_range": [dynamic_range],
    }, separators=(",", ":"))
    drm_parameters = json.dumps({
        "widevine_security_level": ["SW_SECURE_DECODE", "SW_SECURE_CRYPTO"],
        "hdcp_version": ["HDCP_V2_2", "HDCP_V2_1", "HDCP_V2", "HDCP_V1"],
    }, separators=(",", ":"))

    r = self.session.get(
        url=self.config["endpoints"]["manifest"],
        params={
            "content_id": title.data["contentId"],
            "filters": f"content_type={self.content_type}",
            "client_capabilities": client_capabilities,
            "drm_parameters": drm_parameters,
        },
        headers={
            "user-agent": "Disney+;in.startv.hotstar.dplus.tv/23.08.14.4.2915 (Android/13)",
            "hotstarauth": self.hotstar_auth,
            "x-hs-usertoken": self.token,
            "x-hs-device-id": self.device_id,
            "x-hs-client": "platform:androidtv;app_id:in.startv.hotstar.dplus.tv;app_version:23.08.14.4;os:Android;os_version:13;schema_version:0.0.970",
            "x-hs-platform": "androidtv",
            "content-type": "application/json",
        }
    ).json()

    playback = r['success']['page']['spaces']['player']['widget_wrappers'][0]['widget']['data']['player_config'][
        'media_asset']['primary']

    if playback == {}:
        raise ValueError("Wanted playback set is unavailable for this title...")

    if 'widevine' in playback['playback_tags']:
        self.license_api = playback["license_url"]

    # BUG fix: `tracks` was previously overwritten with a requests Response
    # (self.session.get(mpd_url)) before .add() was called on it
    tracks = DASH.from_url(
        url=playback['content_url'],
    ).to_tracks(title.language)
    for track in tracks:
        track.needs_proxy = True
    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title):
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_):
|
||||||
|
return None # will use common privacy cert
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge, **_):
    """POST the raw license challenge (bytes) to the URL saved by get_tracks()."""
    response = self.session.post(url=self.license_api, data=challenge)
    return response.content
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
def configure(self):
    """Set up session headers, HTTP retries, hotstarauth and the user token."""
    self.session.headers.update({
        "Origin": "https://www.hotstar.com",
        "Referer": f"https://www.hotstar.com/{self.region}"
    })
    adapter = HTTPAdapter(max_retries=Retry(
        total=5,
        backoff_factor=1,
        status_forcelist=[500, 502, 503, 504],
        allowed_methods=["GET", "POST"]
    ))
    for scheme in ("https://", "http://"):
        self.session.mount(scheme, adapter)

    self.log.info("Logging into Hotstar")
    self.hotstar_auth = self.get_akamai()
    self.log.info(f" + Calculated HotstarAuth: {self.hotstar_auth}")

    # reuse the cookie's device id when present, otherwise mint one
    if self.session.cookies:
        self.device_id = self.session.cookies.get("deviceId")
        self.log.info(f" + Using Device ID: {self.device_id}")
    else:
        self.device_id = str(uuid.uuid4())
        self.log.info(f" + Created Device ID: {self.device_id}")

    self.token = self.get_token()
    self.log.info(" + Obtained tokens")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_akamai():
|
||||||
|
enc_key = b"\x05\xfc\x1a\x01\xca\xc9\x4b\xc4\x12\xfc\x53\x12\x07\x75\xf9\xee"
|
||||||
|
st = int(time.time())
|
||||||
|
exp = st + 12000
|
||||||
|
res = f"st={st}~exp={exp}~acl=/*"
|
||||||
|
res += "~hmac=" + hmac.new(enc_key, res.encode(), hashlib.sha256).hexdigest()
|
||||||
|
return res
|
||||||
|
|
||||||
|
def get_token(self):
    """
    Return a Hotstar user token, preferring the profile's cache.

    A cached, unexpired token is reused; an expired one is refreshed via
    refresh(). Without a cache, the token is read from the sessionUserUP
    cookie.

    Raises:
        EnvironmentError: if no cookies are available (previously this did
            `raise self.log.error(...)`, which raises None -> TypeError).
    """
    token_cache_path = Path(config.directories.cache) / self.__class__.__name__ / "token.json"
    if token_cache_path.is_file():
        token = json.loads(token_cache_path.read_text(encoding="utf-8"))
        if token.get("exp", 0) > int(time.time()):
            self.log.info(" + Using cached auth tokens...")
            return token["uid"]
        self.log.info(" + Refreshing and using cached auth tokens...")
        return self.save_token(self.refresh(token["uid"], token["sub"]["deviceId"]), token_cache_path)

    if not self.session.cookies:
        self.log.error(" - Please add cookies")
        raise EnvironmentError("Service requires cookies for authentication.")
    token = self.session.cookies.get("sessionUserUP", None, 'www.hotstar.com', '/' + self.region)
    return self.save_token(token, token_cache_path)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def save_token(token, to):
|
||||||
|
data = json.loads(base64.b64decode(token.split(".")[1] + "===").decode("utf-8"))
|
||||||
|
data["uid"] = token
|
||||||
|
data["sub"] = json.loads(data["sub"])
|
||||||
|
|
||||||
|
os.makedirs(os.path.dirname(to), exist_ok=True)
|
||||||
|
with open(to, mode="w", encoding="utf-8") as f:
|
||||||
|
f.write(json.dumps(data, indent=4))
|
||||||
|
|
||||||
|
return token
|
||||||
|
|
||||||
|
def refresh(self, user_id_token, device_id):
    """
    Refresh the Hotstar user token and write it back into the session
    cookie (and the profile's cookie file, when one exists).

    Returns the new `x-hs-usertoken` response header value.
    """
    json_data = {
        'deeplink_url': f'/{self.region}?client_capabilities=%7B%22ads%22%3A%5B%22non_ssai%22%5D%2C%22audio_channel%22%3A%5B%22stereo%22%5D%2C%22container%22%3A%5B%22fmp4%22%2C%22ts%22%5D%2C%22dvr%22%3A%5B%22short%22%5D%2C%22dynamic_range%22%3A%5B%22sdr%22%5D%2C%22encryption%22%3A%5B%22widevine%22%2C%22plain%22%5D%2C%22ladder%22%3A%5B%22web%22%2C%22tv%22%2C%22phone%22%5D%2C%22package%22%3A%5B%22dash%22%2C%22hls%22%5D%2C%22resolution%22%3A%5B%22sd%22%2C%22hd%22%5D%2C%22video_codec%22%3A%5B%22h264%22%5D%2C%22true_resolution%22%3A%5B%22sd%22%2C%22hd%22%2C%22fhd%22%5D%7D&drm_parameters=%7B%22hdcp_version%22%3A%5B%22HDCP_V2_2%22%5D%2C%22widevine_security_level%22%3A%5B%22SW_SECURE_DECODE%22%5D%2C%22playready_security_level%22%3A%5B%5D%7D',
        'app_launch_count': 1,
    }
    r = self.session.post(
        url=self.config["endpoints"]["refresh"],
        headers={
            'x-hs-usertoken': user_id_token,
            'X-HS-Platform': self.config["device"]["platform"]["name"],
            'X-Country-Code': self.region,
            'X-HS-Accept-language': 'eng',
            'X-Request-Id': str(uuid.uuid4()),
            'x-hs-device-id': device_id,
            'X-HS-Client-Targeting': f'ad_id:{device_id};user_lat:false',
            'X-HS-Client': 'platform:web;app_version:23.06.23.3;browser:Firefox;schema_version:0.0.911',
        },
        json=json_data
    )
    new_token = r.headers["x-hs-usertoken"]

    # keep the session cookie in sync so a saved cookie jar stays valid
    for cookie in self.session.cookies:
        if cookie.name == 'sessionUserUP' and cookie.path == f"/{self.region}" and cookie.domain == 'www.hotstar.com':
            cookie.value = new_token  # debug print(cookie) removed
    for x in self.ALIASES:
        # fixed: was config._Directories.cookies (the private class, not the
        # populated config.directories instance)
        cookie_file = os.path.join(config.directories.cookies, x.lower(), f"{self.profile}.txt")
        if not os.path.isfile(cookie_file):
            cookie_file = os.path.join(config.directories.cookies, x, f"{self.profile}.txt")
        if os.path.isfile(cookie_file):
            # NOTE(review): assumes self.session.cookies is a jar with a
            # .save() method (e.g. MozillaCookieJar) — confirm against caller
            self.session.cookies.save(cookie_file, ignore_discard=True, ignore_expires=True)
            break
    return new_token
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential=None):
    """
    Apply cookies to the session.

    Credentials-based login is not implemented for this service; the
    user token is derived from cookies later, in get_token(). The large
    block of commented-out login-code experiments has been removed.
    """
    if cookies:
        self.session.cookies.update(cookies)
|
||||||
|
|
||||||
|
def parse_title(self, ctx, title):
    """
    Resolve the title ID from the CLI argument and store it on self.title.

    Returns the regex's named groups when a TITLE_RE pattern matches,
    {} when the service defines no TITLE_RE, and None when no pattern
    matched (the raw title is used as-is).

    Raises:
        ValueError: if no title was supplied at all (previously the code
            only logged and then crashed inside re.search(pattern, None)).
    """
    title = title or ctx.parent.params.get("title")
    if not title:
        self.log.error(" - No title ID specified")
        raise ValueError("No title ID specified")
    # default avoids an AttributeError on services without TITLE_RE
    if not getattr(self, "TITLE_RE", None):
        self.title = title
        return {}
    for pattern in as_list(self.TITLE_RE):
        match = re.search(pattern, title)
        if match:
            self.title = match.group("id")
            return match.groupdict()
    self.log.warning(f" - Unable to parse title ID {title!r}, using as-is")
    self.title = title
|
||||||
18
services/Hotstar/config.yaml
Normal file
18
services/Hotstar/config.yaml
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
endpoints:
|
||||||
|
login: 'https://api.hotstar.com/in/aadhar/v2/web/th/user/login'
|
||||||
|
#refresh: 'https://www.hotstar.com/api/internal/bff/v2/start'
|
||||||
|
refresh: 'https://api.hotstar.com/um/v3/users/refresh'
|
||||||
|
tv_title: 'https://api.hotstar.com/o/v1/show/detail'
|
||||||
|
tv_episodes: 'https://api.hotstar.com/o/v1/tray/g/1/detail'
|
||||||
|
movie_title: 'https://api.hotstar.com/o/v1/movie/detail'
|
||||||
|
#manifest: 'https://api.hotstar.com/play/v4/playback/content/{id}'
|
||||||
|
manifest: 'https://apix.hotstar.com/v2/pages/watch'
|
||||||
|
|
||||||
|
device:
|
||||||
|
os:
|
||||||
|
name: 'Windows'
|
||||||
|
version: 10
|
||||||
|
|
||||||
|
platform:
|
||||||
|
name: 'web'
|
||||||
|
version: '7.35.0'
|
||||||
365
services/ITV/__init__.py
Normal file
365
services/ITV/__init__.py
Normal file
@ -0,0 +1,365 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import MozillaCookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
|
import click
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from click import Context
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests.dash import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Subtitle, Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class ITV(Service):
|
||||||
|
"""
|
||||||
|
Service code for ITVx streaming service (https://www.itv.com/).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: Cookies (Optional for free content | Required for premium content)
|
||||||
|
Robustness:
|
||||||
|
L1: 1080p
|
||||||
|
L3: 720p
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Use complete title URL as input (pay attention to the URL format):
|
||||||
|
SERIES: https://www.itv.com/watch/bay-of-fires/10a5270
|
||||||
|
EPISODE: https://www.itv.com/watch/bay-of-fires/10a5270/10a5270a0001
|
||||||
|
FILM: https://www.itv.com/watch/mad-max-beyond-thunderdome/2a7095
|
||||||
|
- Some shows aren't listed as series, only as "Latest episodes"
|
||||||
|
Download by SERIES URL for those titles, not by EPISODE URL
|
||||||
|
|
||||||
|
\b
|
||||||
|
Examples:
|
||||||
|
- SERIES: devine dl -w s01e01 itv https://www.itv.com/watch/bay-of-fires/10a5270
|
||||||
|
- EPISODE: devine dl itv https://www.itv.com/watch/bay-of-fires/10a5270/10a5270a0001
|
||||||
|
- FILM: devine dl itv https://www.itv.com/watch/mad-max-beyond-thunderdome/2a7095
|
||||||
|
|
||||||
|
\b
|
||||||
|
Notes:
|
||||||
|
ITV seem to detect and throttle multiple connections against the server.
|
||||||
|
It's recommended to use requests as downloader, with few workers.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
GEOFENCE = ("gb",)
|
||||||
|
ALIASES = ("itvx",)
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="ITV", short_help="https://www.itv.com/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> ITV:
    # Click entry point: construct the service from the parsed CLI arguments.
    # Decorator order matters: @staticmethod must sit above the click stack.
    return ITV(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title: str):
    """Store the requested title, resolve the profile, and set base headers."""
    self.title = title
    super().__init__(ctx)

    # Fall back to the default profile when none was supplied on the CLI.
    self.profile = ctx.parent.params.get("profile") or "default"

    self.session.headers.update(self.config["headers"])
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Authenticate against ITV by refreshing access tokens from cookies.

    The refresh token is read from the "Itv.Session" cookie (a JSON blob);
    a previously cached refresh token is preferred when present. On success
    the fresh access token is stored in ``self.authorization``.
    """
    super().authenticate(cookies, credential)
    self.authorization = None

    # Credentials alone cannot authenticate: the refresh token lives in cookies.
    if credential and not cookies:
        self.log.error(" - Error: This service requires cookies for authentication.")
        sys.exit(1)

    if cookies is not None:
        self.log.info(f"\n + Cookies for '{self.profile}' profile found, authenticating...")
        itv_session = next((cookie.value for cookie in cookies if cookie.name == "Itv.Session"), None)
        if not itv_session:
            self.log.error(" - Error: Session cookie not found. Cookies may be invalid.")
            sys.exit(1)

        # The cookie value is JSON holding the token bundle.
        itv_session = json.loads(itv_session)
        refresh_token = itv_session["tokens"]["content"].get("refresh_token")
        if not refresh_token:
            self.log.error(" - Error: Access tokens not found. Try refreshing your cookies.")
            sys.exit(1)

        # Cached tokens (from a previous run) take precedence over the cookie's copy.
        cache = self.cache.get(f"tokens_{self.profile}")

        headers = {
            "Host": "auth.prd.user.itv.com",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0",
            "Accept": "application/vnd.user.auth.v2+json",
            "Accept-Language": "en-US,en;q=0.8",
            "Origin": "https://www.itv.com",
            "Connection": "keep-alive",
            "Referer": "https://www.itv.com/",
        }

        params = {"refresh": cache.data["refresh_token"]} if cache else {"refresh": refresh_token}

        r = self.session.get(
            self.config["endpoints"]["refresh"],
            headers=headers,
            params=params,
        )
        if r.status_code != 200:
            raise ConnectionError(f"Failed to refresh tokens: {r.text}")

        tokens = r.json()
        cache.set(tokens)
        self.log.info(" + Tokens refreshed and placed in cache\n")

        self.authorization = tokens["access_token"]
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Query ITV's text-search API and yield matches as SearchResults."""
    params = {
        "broadcaster": "itv",
        "featureSet": "clearkey,outband-webvtt,hls,aes,playready,widevine,fairplay,bbts,progressive,hd,rtmpe",
        "onlyFree": "false",
        "platform": "dotcom",
        "query": self.title,
    }

    r = self.session.get(self.config["endpoints"]["search"], params=params)
    r.raise_for_status()

    results = r.json()["results"]
    if isinstance(results, list):
        for result in results:
            # Each result carries exactly one of these three title fields.
            special = result["data"].get("specialTitle")
            standard = result["data"].get("programmeTitle")
            film = result["data"].get("filmTitle")
            title = special if special else standard if standard else film
            tier = result["data"].get("tier")

            # Build the watch-page slug the same way the website does.
            slug = self._sanitize(title)

            # apiEncoded id is underscore-separated; keep the first two
            # segments, join with "a", then strip "a000N" padding runs.
            _id = result["data"]["legacyId"]["apiEncoded"]
            _id = "_".join(_id.split("_")[:2]).replace("_", "a")
            _id = re.sub(r"a000\d+", "", _id)

            yield SearchResult(
                id_=f"https://www.itv.com/watch/{slug}/{_id}",
                title=title,
                description=result["data"].get("synopsis"),
                label=result.get("entityType") + f" {tier}",
                url=f"https://www.itv.com/watch/{slug}/{_id}",
            )
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """Parse the ITVx watch-page data into devine Movie/Episode titles.

    The page lists content under a "seriesList"; a show is either a
    SERIES, a FILM, or an unlabelled "Latest episodes"-style rail.
    """
    data = self.get_data(self.title)
    kind = next(
        (x.get("seriesType") for x in data.get("seriesList") if x.get("seriesType") in ["SERIES", "FILM"]), None
    )

    # Some shows are not listed as "SERIES" or "FILM", only as "Latest episodes"
    if not kind and next(
        (x for x in data.get("seriesList") if x.get("seriesLabel").lower() in ("latest episodes", "other episodes")), None
    ):
        titles = data["seriesList"][0]["titles"]
        episodes = [self._build_episode(episode, data["programme"]["title"]) for episode in titles]
        # Assign sequential episode numbers to specials that carry none.
        counter = 1
        for episode in episodes:
            if episode.season == 0 and episode.number == 0:
                episode.number = counter
                counter += 1
        return Series(episodes)

    if kind == "SERIES" and data.get("episode"):
        # Direct episode URL: only a single episode is returned.
        return Series([self._build_episode(data["episode"], data["programme"]["title"])])

    elif kind == "SERIES":
        return Series(
            [
                self._build_episode(episode, data["programme"]["title"])
                for series in data["seriesList"]
                if "Latest episodes" not in series["seriesLabel"]
                for episode in series["titles"]
            ]
        )

    elif kind == "FILM":
        return Movies(
            [
                Movie(
                    id_=movie["episodeId"],
                    service=self.__class__,
                    name=data["programme"]["title"],
                    year=movie.get("productionYear"),
                    language="en",  # TODO: language detection
                    data=movie,
                )
                for movies in data["seriesList"]
                for movie in movies["titles"]
            ]
        )

def _build_episode(self, episode: dict, show_title: str) -> Episode:
    """Map a raw ITVx episode dict onto a devine Episode.

    Shared by all SERIES branches above; the original duplicated this
    construction three times verbatim.
    """
    return Episode(
        id_=episode["episodeId"],
        service=self.__class__,
        title=show_title,
        # Non-integer series/episode fields (e.g. None) mean a special.
        season=episode.get("series") if isinstance(episode.get("series"), int) else 0,
        number=episode.get("episode") if isinstance(episode.get("episode"), int) else 0,
        name=episode["episodeTitle"],
        language="en",  # TODO: language detection
        data=episode,
    )
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Request the playlist for a title and build its DASH/subtitle tracks.

    Side effects: stores the manifest URL in ``self.manifest`` and the
    Widevine key-service URL in ``self.license`` for later license calls.
    """
    playlist = title.data.get("playlistUrl")

    headers = {
        "Accept": "application/vnd.itv.vod.playlist.v4+json",
        "Accept-Language": "en-US,en;q=0.9,da;q=0.8",
        "Connection": "keep-alive",
        "Content-Type": "application/json",
    }

    payload = {
        "client": {
            "id": "lg",
        },
        "device": {
            "deviceGroup": "ctv",
        },
        "variantAvailability": {
            "player": "dash",
            "featureset": [
                "mpeg-dash",
                "widevine",
                "outband-webvtt",
                "hd",
                "single-track",
            ],
            "platformTag": "ctv",
            "drm": {
                "system": "widevine",
                "maxSupported": "L3",
            },
        },
    }
    if self.authorization:
        payload["user"] = {"token": self.authorization}

    # NOTE(review): the original issued this identical POST twice in a row;
    # the redundant duplicate request has been removed.
    r = self.session.post(playlist, headers=headers, json=payload)
    if r.status_code != 200:
        raise ConnectionError(r.text)

    data = r.json()
    video = data["Playlist"]["Video"]
    subtitles = video.get("Subtitles")
    self.manifest = video["MediaFiles"][0].get("Href")
    self.license = video["MediaFiles"][0].get("KeyServiceUrl")

    tracks = DASH.from_url(self.manifest, self.session).to_tracks(title.language)
    # Keep the full playlist response around for get_chapters().
    tracks.videos[0].data = data

    if subtitles is not None:
        for subtitle in subtitles:
            tracks.add(
                Subtitle(
                    id_=hashlib.md5(subtitle.get("Href", "").encode()).hexdigest()[0:6],
                    url=subtitle.get("Href", ""),
                    # Subtitle URLs end in their format's extension (e.g. .vtt).
                    codec=Subtitle.Codec.from_mime(subtitle.get("Href", "")[-3:]),
                    language=title.language,
                    forced=False,
                )
            )

    # Flag audio-description/alternative audio tracks as descriptive.
    for track in tracks.audio:
        role = track.data["dash"]["representation"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Derive chapter markers from the playlist's content-break timecodes."""
    playlist = title.tracks.videos[0].data["Playlist"]
    if not playlist.get("ContentBreaks"):
        return Chapters()

    chapters = []
    # End credits are sometimes listed before the last chapter, so we skip those for now
    for content_break in playlist["ContentBreaks"]:
        timecode = content_break.get("TimeCode")
        if timecode != "00:00:00:000":
            # Timecodes arrive as HH:MM:SS:mmm; swap the final colon for a dot.
            chapters.append(Chapter(".".join(timecode.rsplit(":", 1))))

    return Chapters(chapters)
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
    """ITV does not serve a Widevine privacy certificate; always None.

    NOTE(review): annotation widened from ``str`` to ``Optional[str]`` to
    match the actual return value; callers are unaffected.
    """
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the Widevine challenge to the key server and return the license."""
    response = self.session.post(url=self.license, data=challenge)
    if response.status_code != 200:
        raise ConnectionError(response.text)
    return response.content
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
def get_data(self, url: str) -> dict:
    """Scrape the Next.js ``__NEXT_DATA__`` JSON blob from a watch page.

    Returns the ``props.pageProps`` object used by get_titles().
    Raises ConnectionError on HTTP failure, ValueError on parse failure.
    """
    # TODO: Find a proper endpoint for this
    r = self.session.get(url)
    if r.status_code != 200:
        raise ConnectionError(r.text)

    soup = BeautifulSoup(r.text, "html.parser")
    tag = soup.select_one("#__NEXT_DATA__")
    if tag is None:
        # NOTE(review): previously crashed with a bare AttributeError here.
        raise ValueError("Could not find __NEXT_DATA__ script on the page")

    try:
        data = json.loads(tag.text)
    except json.JSONDecodeError as e:
        # Chain the original error so the root cause is preserved.
        raise ValueError(f"Failed to parse JSON: {e}") from e

    return data["props"]["pageProps"]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _sanitize(title: str) -> str:
|
||||||
|
title = title.lower()
|
||||||
|
title = title.replace("&", "and")
|
||||||
|
title = re.sub(r"[:;/()]", "", title)
|
||||||
|
title = re.sub(r"[ ]", "-", title)
|
||||||
|
title = re.sub(r"[\\*!?¿,'\"<>|$#`’]", "", title)
|
||||||
|
title = re.sub(rf"[{'.'}]{{2,}}", ".", title)
|
||||||
|
title = re.sub(rf"[{'_'}]{{2,}}", "_", title)
|
||||||
|
title = re.sub(rf"[{'-'}]{{2,}}", "-", title)
|
||||||
|
title = re.sub(rf"[{' '}]{{2,}}", " ", title)
|
||||||
|
return title
|
||||||
8
services/ITV/config.yaml
Normal file
8
services/ITV/config.yaml
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
headers:
|
||||||
|
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0
|
||||||
|
accept-language: en-US,en;q=0.8
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
login: https://auth.prd.user.itv.com/v2/auth
|
||||||
|
refresh: https://auth.prd.user.itv.com/token
|
||||||
|
search: https://textsearch.prd.oasvc.itv.com/search
|
||||||
496
services/MAX/__init__.py
Normal file
496
services/MAX/__init__.py
Normal file
@ -0,0 +1,496 @@
|
|||||||
|
import json
|
||||||
|
import os.path
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from hashlib import md5
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
import xmltodict
|
||||||
|
from langcodes import Language
|
||||||
|
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks.video import Video
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH, HLS
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.tracks import Chapters, Tracks, Subtitle, Chapter
|
||||||
|
|
||||||
|
class MAX(Service):
|
||||||
|
"""
|
||||||
|
Service code for MAX's streaming service (https://max.com).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Cookies
|
||||||
|
Security: UHD@L1 FHD@L1 HD@L3
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ["MAX", "max"]
|
||||||
|
TITLE_RE = r"^(?:https?://(?:www\.|play\.)?max\.com/)?(?P<type>[^/]+)/(?P<id>[^/]+)"
|
||||||
|
|
||||||
|
VIDEO_CODEC_MAP = {
|
||||||
|
"H.264": ["avc1", "AVC", "Codec.AVC"],
|
||||||
|
"H.265": ["hvc1", "dvh1", "HEVC", "Codec.HEVC"]
|
||||||
|
}
|
||||||
|
|
||||||
|
AUDIO_CODEC_MAP = {
|
||||||
|
"AAC": "mp4a",
|
||||||
|
"AC3": "ac-3",
|
||||||
|
"EC3": "ec-3"
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@click.command(name="MAX", short_help="https://max.com")
|
||||||
|
@click.argument("title", type=str, required=False)
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx, **kwargs):
|
||||||
|
return MAX(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title):
    """Capture CLI parameters; an HDR10 range request forces H.265."""
    super().__init__(ctx)
    self.title = title
    parent_params = ctx.parent.params
    self.vcodec = parent_params.get("vcodec")
    self.acodec = parent_params.get("acodec")
    self.range = parent_params.get("range_")
    self.alang = parent_params.get("lang")

    # HDR10 streams are only requested with the H.265 codec.
    if self.range == 'HDR10':
        self.vcodec = "H.265"
|
||||||
|
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
|
||||||
|
|
||||||
|
try:
|
||||||
|
content_type, external_id = (re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "id"))
|
||||||
|
except Exception:
|
||||||
|
raise ValueError("Could not parse ID from title - is the URL correct?")
|
||||||
|
|
||||||
|
|
||||||
|
response = self.session.get(
|
||||||
|
f"https://default.prd.api.max.com/cms/routes/{content_type}/{external_id}?include=default",
|
||||||
|
)
|
||||||
|
|
||||||
|
data = response.json()
|
||||||
|
title_id = data['data']['relationships']['target']['data']['id']
|
||||||
|
title_info = next(x['attributes'] for x in data['included'] if x['id'] == title_id)
|
||||||
|
content_title = title_info.get('title') or title_info['name'].split("-")[0]
|
||||||
|
|
||||||
|
if content_type == "movie" or content_type == "standalone":
|
||||||
|
metadata = self.session.get(
|
||||||
|
url=f"https://default.prd.api.max.com/content/videos/{external_id}/activeVideoForShow?&include=edit"
|
||||||
|
).json()['data']
|
||||||
|
|
||||||
|
|
||||||
|
release_date = metadata["attributes"].get("airDate") or metadata["attributes"].get("firstAvailableDate")
|
||||||
|
year = datetime.strptime(release_date, '%Y-%m-%dT%H:%M:%SZ').year
|
||||||
|
return Movies([Movie(
|
||||||
|
id_=title_id,
|
||||||
|
service=self.__class__,
|
||||||
|
name=content_title.title(),
|
||||||
|
year=year,
|
||||||
|
data=metadata,
|
||||||
|
language="en"
|
||||||
|
)])
|
||||||
|
|
||||||
|
if content_type == "show" or content_type == "mini-series":
|
||||||
|
episodes = []
|
||||||
|
if content_type == "mini-series":
|
||||||
|
alias = "generic-miniseries-page-rail-episodes"
|
||||||
|
else:
|
||||||
|
alias = "generic-%s-page-rail-episodes-tabbed-content" % (content_type)
|
||||||
|
|
||||||
|
included_dt = response.json()["included"]
|
||||||
|
season_data = [data for included in included_dt for key, data in included.items()
|
||||||
|
if key == "attributes" for k,d in data.items() if d == alias][0]
|
||||||
|
season_data = season_data["component"]["filters"][0]
|
||||||
|
|
||||||
|
seasons = [int(season["value"]) for season in season_data["options"]]
|
||||||
|
|
||||||
|
season_parameters = [(int(season["value"]), season["parameter"]) for season in season_data["options"]
|
||||||
|
for season_number in seasons if int(season["id"]) == int(season_number)]
|
||||||
|
|
||||||
|
if not season_parameters:
|
||||||
|
raise self.log.exit("season(s) %s not found")
|
||||||
|
|
||||||
|
data_paginas = self.session.get(url="https://default.prd.api.max.com/cms/collections/generic-show-page-rail-episodes-tabbed-content?include=default&pf[show.id]=%s" % (external_id)).json()
|
||||||
|
total_pages = data_paginas['data']['meta']['itemsTotalPages']
|
||||||
|
|
||||||
|
for pagina in range(1, total_pages + 1):
|
||||||
|
for (value, parameter) in season_parameters:
|
||||||
|
data = self.session.get(url="https://default.prd.api.max.com/cms/collections/generic-show-page-rail-episodes-tabbed-content?include=default&pf[show.id]=%s&%s&page[items.number]=%s" % (external_id, parameter, pagina)).json()
|
||||||
|
total_pages = data['data']['meta']['itemsTotalPages']
|
||||||
|
try:
|
||||||
|
episodes_dt = sorted([dt for dt in data["included"] if "attributes" in dt and "videoType" in
|
||||||
|
dt["attributes"] and dt["attributes"]["videoType"] == "EPISODE"
|
||||||
|
and int(dt["attributes"]["seasonNumber"]) == int(value)], key=lambda x: x["attributes"]["episodeNumber"])
|
||||||
|
except KeyError:
|
||||||
|
raise self.log.exit("season episodes were not found")
|
||||||
|
|
||||||
|
episodes.extend(episodes_dt)
|
||||||
|
|
||||||
|
titles = Series()
|
||||||
|
release_date = episodes[0]["attributes"].get("airDate") or episodes[0]["attributes"].get("firstAvailableDate")
|
||||||
|
year = datetime.strptime(release_date, '%Y-%m-%dT%H:%M:%SZ').year
|
||||||
|
|
||||||
|
for episode in episodes:
|
||||||
|
titles.add(Episode(
|
||||||
|
id_=episode['id'],
|
||||||
|
service=self.__class__,
|
||||||
|
name=episode['attributes']['name'],
|
||||||
|
year=year,
|
||||||
|
season=episode['attributes']['seasonNumber'],
|
||||||
|
number=episode['attributes']['episodeNumber'],
|
||||||
|
title=content_title.title(),
|
||||||
|
data=episode,
|
||||||
|
language="en"
|
||||||
|
))
|
||||||
|
|
||||||
|
return titles
|
||||||
|
|
||||||
|
def get_tracks(self, title):
|
||||||
|
edit_id = title.data['relationships']['edit']['data']['id']
|
||||||
|
|
||||||
|
response = self.session.post(
|
||||||
|
url=self.config['endpoints']['playbackInfo'],
|
||||||
|
json={
|
||||||
|
'appBundle': 'beam',
|
||||||
|
'consumptionType': 'streaming',
|
||||||
|
'deviceInfo': {
|
||||||
|
'deviceId': '2dec6cb0-eb34-45f9-bbc9-a0533597303c',
|
||||||
|
'browser': {
|
||||||
|
'name': 'chrome',
|
||||||
|
'version': '113.0.0.0',
|
||||||
|
},
|
||||||
|
'make': 'Microsoft',
|
||||||
|
'model': 'XBOX-Unknown',
|
||||||
|
'os': {
|
||||||
|
'name': 'Windows',
|
||||||
|
'version': '113.0.0.0',
|
||||||
|
},
|
||||||
|
'platform': 'XBOX',
|
||||||
|
'deviceType': 'xbox',
|
||||||
|
'player': {
|
||||||
|
'sdk': {
|
||||||
|
'name': 'Beam Player Console',
|
||||||
|
'version': '1.0.2.4',
|
||||||
|
},
|
||||||
|
'mediaEngine': {
|
||||||
|
'name': 'GLUON_BROWSER',
|
||||||
|
'version': '1.20.1',
|
||||||
|
},
|
||||||
|
'playerView': {
|
||||||
|
'height': 1080,
|
||||||
|
'width': 1920,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'editId': edit_id,
|
||||||
|
'capabilities': {
|
||||||
|
'manifests': {
|
||||||
|
'formats': {
|
||||||
|
'dash': {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'codecs': {
|
||||||
|
'video': {
|
||||||
|
'hdrFormats': [
|
||||||
|
'hlg',
|
||||||
|
'hdr10',
|
||||||
|
'dolbyvision5',
|
||||||
|
'dolbyvision8',
|
||||||
|
],
|
||||||
|
'decoders': [
|
||||||
|
{
|
||||||
|
'maxLevel': '6.2',
|
||||||
|
'codec': 'h265',
|
||||||
|
'levelConstraints': {
|
||||||
|
'width': {
|
||||||
|
'min': 1920,
|
||||||
|
'max': 3840,
|
||||||
|
},
|
||||||
|
'height': {
|
||||||
|
'min': 1080,
|
||||||
|
'max': 2160,
|
||||||
|
},
|
||||||
|
'framerate': {
|
||||||
|
'min': 15,
|
||||||
|
'max': 60,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'profiles': [
|
||||||
|
'main',
|
||||||
|
'main10',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'maxLevel': '4.2',
|
||||||
|
'codec': 'h264',
|
||||||
|
'levelConstraints': {
|
||||||
|
'width': {
|
||||||
|
'min': 640,
|
||||||
|
'max': 3840,
|
||||||
|
},
|
||||||
|
'height': {
|
||||||
|
'min': 480,
|
||||||
|
'max': 2160,
|
||||||
|
},
|
||||||
|
'framerate': {
|
||||||
|
'min': 15,
|
||||||
|
'max': 60,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'profiles': [
|
||||||
|
'high',
|
||||||
|
'main',
|
||||||
|
'baseline',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
'audio': {
|
||||||
|
'decoders': [
|
||||||
|
{
|
||||||
|
'codec': 'aac',
|
||||||
|
'profiles': [
|
||||||
|
'lc',
|
||||||
|
'he',
|
||||||
|
'hev2',
|
||||||
|
'xhe',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'devicePlatform': {
|
||||||
|
'network': {
|
||||||
|
'lastKnownStatus': {
|
||||||
|
'networkTransportType': 'unknown',
|
||||||
|
},
|
||||||
|
'capabilities': {
|
||||||
|
'protocols': {
|
||||||
|
'http': {
|
||||||
|
'byteRangeRequests': True,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'videoSink': {
|
||||||
|
'lastKnownStatus': {
|
||||||
|
'width': 1290,
|
||||||
|
'height': 2796,
|
||||||
|
},
|
||||||
|
'capabilities': {
|
||||||
|
'colorGamuts': [
|
||||||
|
'standard',
|
||||||
|
'wide',
|
||||||
|
],
|
||||||
|
'hdrFormats': [
|
||||||
|
'dolbyvision',
|
||||||
|
'hdr10plus',
|
||||||
|
'hdr10',
|
||||||
|
'hlg',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'gdpr': False,
|
||||||
|
'firstPlay': False,
|
||||||
|
'playbackSessionId': str(uuid.uuid4()),
|
||||||
|
'applicationSessionId': str(uuid.uuid4()),
|
||||||
|
'userPreferences': {},
|
||||||
|
'features': [],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
playback_data = response.json()
|
||||||
|
|
||||||
|
# TEST
|
||||||
|
video_info = next(x for x in playback_data['videos'] if x['type'] == 'main')
|
||||||
|
title.is_original_lang = Language.get(video_info['defaultAudioSelection']['language'])
|
||||||
|
|
||||||
|
fallback_url = playback_data["fallback"]["manifest"]["url"]
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.license_url = playback_data["drm"]["schemes"]["widevine"]["licenseUrl"]
|
||||||
|
drm_protection_enabled = True
|
||||||
|
except (KeyError, IndexError):
|
||||||
|
drm_protection_enabled = False
|
||||||
|
|
||||||
|
manifest_url = fallback_url.replace('_fallback', '')
|
||||||
|
|
||||||
|
tracks = DASH.from_url(url= manifest_url, session=self.session).to_tracks(language=title.language)
|
||||||
|
#tracks.subtitles.clear()
|
||||||
|
|
||||||
|
subtitles = self.get_subtitles(manifest_url, fallback_url)
|
||||||
|
|
||||||
|
subs = []
|
||||||
|
for subtitle in subtitles:
|
||||||
|
subs.append(
|
||||||
|
Subtitle(
|
||||||
|
id_=md5(subtitle["url"].encode()).hexdigest(),
|
||||||
|
url=subtitle["url"],
|
||||||
|
codec=Subtitle.Codec.from_codecs("vtt"),
|
||||||
|
language=subtitle["language"],
|
||||||
|
forced=subtitle['name'] == 'Forced',
|
||||||
|
sdh=subtitle['name'] == 'SDH'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
tracks.add(subs)
|
||||||
|
|
||||||
|
if self.vcodec:
|
||||||
|
tracks.videos = [x for x in tracks.videos if (x.codec or "")[:5] == self.vcodec]
|
||||||
|
|
||||||
|
if self.acodec:
|
||||||
|
tracks.audios = [x for x in tracks.audio if (x.codec or "")[:4] == self.AUDIO_CODEC_MAP[self.acodec]]
|
||||||
|
|
||||||
|
for track in tracks:
|
||||||
|
# track.needs_proxy = True
|
||||||
|
#if isinstance(track, Video):
|
||||||
|
#print(dir(track))
|
||||||
|
#track.Codec = track.extra[0].get("codecs")
|
||||||
|
# track.hdr10 = codec[0:4] in ("hvc1", "hev1") and codec[5] == "2"
|
||||||
|
# track.dv = codec[0:4] in ("dvh1", "dvhe")
|
||||||
|
if isinstance(track, Subtitle) and track.codec == "":
|
||||||
|
track.codec = "webvtt"
|
||||||
|
|
||||||
|
title.data['info'] = video_info
|
||||||
|
|
||||||
|
return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
    """Chapters are not currently produced for MAX titles.

    NOTE(review): the original body had an unconditional ``return []``
    followed by unreachable, commented-out annotation parsing and a final
    ``return chapters`` referencing an undefined name. The dead code has
    been removed; reinstate annotation-based chapters deliberately if needed.
    """
    return []
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
    """MAX does not serve a Widevine privacy certificate; always None.

    NOTE(review): annotation widened from ``str`` to ``Optional[str]`` to
    match the actual return value; callers are unaffected.
    """
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge, **_):
    """Forward the raw Widevine challenge (bytes) to MAX's license server."""
    response = self.session.post(
        url=self.license_url,
        data=challenge  # expects bytes
    )
    return response.content
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Prime the session with MAX cookies/headers and fetch a session token."""
    # NOTE(review): `token` is read before cookies are merged and never used
    # afterwards — presumably leftover code; confirm before removing.
    token = self.session.cookies.get("st")
    # NOTE(review): raises TypeError if `cookies` is None — cookies appear to
    # be mandatory for this service; confirm against the class docstring.
    self.session.cookies.update(cookies)
    # The "session" cookie holds a JSON-encoded session/device identifier.
    device_id = json.loads(self.session.cookies.get_dict()["session"])
    self.session.headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/113.0',
        'Accept': 'application/json, text/plain, */*',
        'Content-Type': 'application/json',
        'x-disco-client': 'WEB:NT 10.0:beam:0.0.0',
        'x-disco-params': 'realm=bolt,bid=beam,features=ar',
        'x-device-info': 'beam/0.0.0 (desktop/desktop; Windows/NT 10.0; b3950c49-ed17-49d0-beb2-11b1d61e5672/da0cdd94-5a39-42ef-aa68-54cbc1b852c3)',
        'traceparent': '00-053c91686df1e7ee0b0b0f7fda45ee6a-f5a98d6877ba2515-01',
        'tracestate': f'wbd=session:{device_id}',
        'Origin': 'https://play.max.com',
        'Referer': 'https://play.max.com/',
    })

    # The bootstrap call returns the session-state token required on all
    # subsequent API requests.
    auth_token = self.get_device_token()
    self.session.headers.update({
        "x-wbd-session-state": auth_token
    })
|
||||||
|
|
||||||
|
def get_device_token(self):
    """Bootstrap a session context and return its x-wbd-session-state token."""
    response = self.session.post(
        'https://default.prd.api.max.com/session-context/headwaiter/v1/bootstrap',
    )
    response.raise_for_status()
    # The token comes back as a response header, not in the body.
    return response.headers.get('x-wbd-session-state')
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def convert_timecode(time):
|
||||||
|
secs, ms = divmod(time, 1)
|
||||||
|
mins, secs = divmod(secs, 60)
|
||||||
|
hours, mins = divmod(mins, 60)
|
||||||
|
ms = ms * 10000
|
||||||
|
chapter_time = '%02d:%02d:%02d.%04d' % (hours, mins, secs, ms)
|
||||||
|
|
||||||
|
return chapter_time
|
||||||
|
|
||||||
|
def get_subtitles(self, mpd_url, fallback_url):
    """Build subtitle dicts (url/language/kind) from the DASH manifest.

    The .vtt files live next to the fallback manifest, so URLs are
    reconstructed from ``fallback_url`` plus the segment-template path.
    """
    base_url = "/".join(fallback_url.split("/")[:-1]) + "/"
    xml = xmltodict.parse(requests.get(mpd_url).text)

    try:
        tracks = xml["MPD"]["Period"][0]["AdaptationSet"]
    except KeyError:
        # Single-period manifests are a dict, not a list of periods.
        tracks = xml["MPD"]["Period"]["AdaptationSet"]

    subs_tracks_js = []
    for subs_tracks in tracks:
        if subs_tracks['@contentType'] == 'text':
            for x in self.force_instance(subs_tracks, "Representation"):
                try:
                    path = re.search(r'(t/\w+/)', x["SegmentTemplate"]["@media"])[1]
                except AttributeError:
                    path = 't/sub/'

                role = subs_tracks["Role"]["@value"]
                is_sdh = role == "caption"
                is_forced = role == "forced-subtitle"

                # NOTE(review): the original reset `text` to "" after the
                # caption branch, so the " (SDH)" label was always lost, and
                # unknown roles reused a stale `url` from a prior iteration.
                if is_sdh:
                    url = base_url + path + subs_tracks['@lang'] + ('_sdh.vtt' if 'sdh' in subs_tracks["Label"].lower() else '_cc.vtt')
                    text = " (SDH)"
                elif is_forced:
                    url = base_url + path + subs_tracks['@lang'] + '_forced.vtt'
                    text = " (Forced)"
                elif role == "subtitle":
                    url = base_url + path + subs_tracks['@lang'] + '_sub.vtt'
                    text = ""
                else:
                    continue  # unrecognized role: skip instead of guessing

                subs_tracks_js.append({
                    "url": url,
                    "format": "vtt",
                    "language": subs_tracks["@lang"],
                    "languageDescription": Language.make(language=subs_tracks["@lang"].split('-')[0]).display_name() + text,
                    "name": "SDH" if is_sdh else "Forced" if is_forced else "Full",
                })

    # The same URL can be described by multiple representations.
    subs_tracks_js = self.remove_dupe(subs_tracks_js)

    return subs_tracks_js
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def force_instance(data, variable):
|
||||||
|
if isinstance(data[variable], list):
|
||||||
|
X = data[variable]
|
||||||
|
else:
|
||||||
|
X = [data[variable]]
|
||||||
|
return X
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def remove_dupe(items):
|
||||||
|
valores_chave = set()
|
||||||
|
new_items = []
|
||||||
|
|
||||||
|
for item in items:
|
||||||
|
valor = item['url']
|
||||||
|
if valor not in valores_chave:
|
||||||
|
new_items.append(item)
|
||||||
|
valores_chave.add(valor)
|
||||||
|
|
||||||
|
return new_items
|
||||||
5
services/MAX/config.yaml
Normal file
5
services/MAX/config.yaml
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
endpoints:
|
||||||
|
contentRoutes: 'https://default.prd.api.max.com/cms/routes/%s/%s?include=default'
|
||||||
|
moviePages: 'https://default.prd.api.max.com/content/videos/%s/activeVideoForShow?&include=edit'
|
||||||
|
playbackInfo: 'https://default.prd.api.max.com/any/playback/v1/playbackInfo'
|
||||||
|
showPages: 'https://default.prd.api.max.com/cms/collections/generic-show-page-rail-episodes-tabbed-content?include=default&pf[show.id]=%s&%s'
|
||||||
207
services/MY5/__init__.py
Normal file
207
services/MY5/__init__.py
Normal file
@ -0,0 +1,207 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import re
|
||||||
|
import tempfile
|
||||||
|
import os
|
||||||
|
from collections.abc import Generator
|
||||||
|
from typing import Any, Union
|
||||||
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
from click import Context
|
||||||
|
from devine.core.manifests.dash import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapter, Tracks
|
||||||
|
from devine.core.utils.sslciphers import SSLCiphers
|
||||||
|
from pywidevine.cdm import Cdm as WidevineCdm
|
||||||
|
|
||||||
|
|
||||||
|
class MY5(Service):
    """
    \b
    Service code for Channel 5's My5 streaming service (https://channel5.com).

    \b
    Author: stabbedbybrick
    Authorization: None
    Robustness:
      L3: 1080p, AAC2.0

    \b
    Tips:
        - Input for series/films/episodes can be either complete URL or just the slug/path:
          https://www.channel5.com/the-cuckoo OR the-cuckoo OR the-cuckoo/season-1/episode-1

    \b
    Known bugs:
        - The progress bar is broken for certain DASH manifests
          See issue: https://github.com/devine-dl/devine/issues/106

    """

    ALIASES = ("channel5", "ch5", "c5")
    GEOFENCE = ("gb",)  # service is geo-locked to the UK
    # Accepts a full URL, a bare show slug, or slug/season/episode paths.
    TITLE_RE = r"^(?:https?://(?:www\.)?channel5\.com(?:/show)?/)?(?P<id>[a-z0-9-]+)(?:/(?P<sea>[a-z0-9-]+))?(?:/(?P<ep>[a-z0-9-]+))?"

    @staticmethod
    @click.command(name="MY5", short_help="https://channel5.com", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> MY5:
        return MY5(ctx, **kwargs)

    def __init__(self, ctx: Context, title: str):
        self.title = title
        super().__init__(ctx)

        # The API expects the configured (Android) user-agent on every request.
        self.session.headers.update({"user-agent": self.config["user_agent"]})

    def search(self) -> Generator[SearchResult, None, None]:
        """Search the My5 catalogue and yield matching shows."""
        params = {
            "platform": "my5desktop",
            "friendly": "1",
            "query": self.title,
        }

        r = self.session.get(self.config["endpoints"]["search"], params=params)
        r.raise_for_status()

        results = r.json()
        for result in results["shows"]:
            yield SearchResult(
                id_=result.get("f_name"),
                title=result.get("title"),
                description=result.get("s_desc"),
                label=result.get("genre"),
                url="https://www.channel5.com/show/" + result.get("f_name"),
            )

    def get_titles(self) -> Union[Movies, Series]:
        """
        Resolve the input title to a Movies or Series listing.

        A slug with both season and episode parts is fetched as a single
        episode; otherwise the show's full episode list is fetched and
        classified as a film or a series based on the reported genre.

        :raises ValueError: if the input cannot be parsed to a show slug.
        """
        match = re.match(self.TITLE_RE, self.title)
        if not match:
            # fix: previously an unmatched input raised AttributeError on
            # .group() instead of this intended, descriptive error
            raise ValueError("Could not parse ID from title - is the URL correct?")
        title, season, episode = (match.group(i) for i in ("id", "sea", "ep"))
        if not title:
            raise ValueError("Could not parse ID from title - is the URL correct?")

        if season and episode:
            r = self.session.get(
                self.config["endpoints"]["single"].format(
                    show=title,
                    season=season,
                    episode=episode,
                )
            )
            r.raise_for_status()
            episode = r.json()
            return Series(
                [
                    Episode(
                        id_=episode.get("id"),
                        service=self.__class__,
                        title=episode.get("sh_title"),
                        season=int(episode.get("sea_num")) if episode.get("sea_num") else 0,
                        number=int(episode.get("ep_num")) if episode.get("ep_num") else 0,
                        name=episode.get("sh_title"),
                        language="en",
                    )
                ]
            )

        r = self.session.get(self.config["endpoints"]["episodes"].format(show=title))
        r.raise_for_status()
        data = r.json()

        if data["episodes"][0]["genre"] == "Film":
            return Movies(
                [
                    Movie(
                        id_=movie.get("id"),
                        service=self.__class__,
                        year=None,
                        name=movie.get("sh_title"),
                        language="en",  # TODO: don't assume
                    )
                    for movie in data.get("episodes")
                ]
            )
        else:
            return Series(
                [
                    Episode(
                        id_=episode.get("id"),
                        service=self.__class__,
                        title=episode.get("sh_title"),
                        season=int(episode.get("sea_num")) if episode.get("sea_num") else 0,
                        # fix: gate on ep_num — previously checked sea_num, so
                        # episodes missing ep_num would crash on int(None)
                        number=int(episode.get("ep_num")) if episode.get("ep_num") else 0,
                        name=episode.get("title"),
                        language="en",  # TODO: don't assume
                    )
                    for episode in data["episodes"]
                ]
            )

    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """Fetch the DASH manifest for the title and parse it into tracks."""
        self.manifest, self.license = self.get_playlist(title.id)

        tracks = DASH.from_url(self.manifest, self.session).to_tracks(title.language)

        # Mark audio-description style renditions so they can be filtered out.
        for track in tracks.audio:
            role = track.data["dash"]["representation"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        return tracks

    def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
        # My5 does not expose chapter data.
        return []

    def get_widevine_service_certificate(self, **_: Any) -> str:
        return WidevineCdm.common_privacy_cert

    def get_widevine_license(self, challenge: bytes, **_: Any) -> str:
        """POST the Widevine challenge to the asset's key server."""
        r = self.session.post(self.license, data=challenge)
        r.raise_for_status()

        return r.content

    # Service specific functions

    def get_playlist(self, asset_id: str) -> tuple:
        """
        Request playback assets for a title and return (manifest URL, license URL).

        The auth endpoint requires a specific TLS cipher set and a client
        certificate, so the bundled certificate is written to a temporary
        file for the duration of the request and removed afterwards.

        :raises ConnectionError: if the auth request fails.
        :raises ValueError: if no (Widevine) asset is present in the response.
        """
        session = self.session
        for prefix in ("https://", "http://"):
            session.mount(prefix, SSLCiphers())

        cert_binary = base64.b64decode(self.config["certificate"])
        with tempfile.NamedTemporaryFile(delete=False, suffix=".pem") as cert_file:
            cert_file.write(cert_binary)
            cert_path = cert_file.name
        try:
            r = session.get(url=self.config["endpoints"]["auth"].format(title_id=asset_id), cert=cert_path)
        except requests.RequestException as e:
            if "Max retries exceeded" in str(e):
                raise ConnectionError(
                    "Permission denied. If you're behind a VPN/proxy, you might be blocked"
                ) from e
            else:
                raise ConnectionError(f"Failed to request assets: {str(e)}") from e
        finally:
            # Always clean up the temporary certificate file.
            os.remove(cert_path)

        data = r.json()
        if not data.get("assets"):
            raise ValueError(f"Could not find asset: {data}")

        # fix: previously `[...][0]` raised a bare IndexError when no
        # Widevine asset was available
        asset = next((x for x in data["assets"] if x["drm"] == "widevine"), None)
        if asset is None:
            raise ValueError(f"Could not find a Widevine asset: {data}")
        rendition = asset["renditions"][0]
        mpd_url = rendition["url"]
        lic_url = asset["keyserver"]

        # The rendition URL points at one pre-selected variant; strip the
        # variant suffix from the filename to get the full adaptive manifest.
        parse = urlparse(mpd_url)
        path = parse.path.split("/")
        path[-1] = path[-1].split("-")[0].split("_")[0]
        manifest = urlunparse(parse._replace(path="/".join(path)))
        manifest += ".mpd" if not manifest.endswith("mpd") else ""

        return manifest, lic_url
|
||||||
38
services/MY5/config.yaml
Normal file
38
services/MY5/config.yaml
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
user_agent: Dalvik/2.1.0 (Linux; U; Android 14; SM-S901B Build/UP1A.231005.007)
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
base: https://corona.channel5.com
|
||||||
|
content: https://corona.channel5.com/shows/{show}.json?platform=my5android
|
||||||
|
episodes: https://corona.channel5.com/shows/{show}/episodes.json?platform=my5android
|
||||||
|
single: https://corona.channel5.com/shows/{show}/seasons/{season}/episodes/{episode}.json?platform=my5android
|
||||||
|
auth: https://cassie-auth.channel5.com/api/v2/media/my5androidhydradash/{title_id}.json
|
||||||
|
search: https://corona.channel5.com/shows/search.json
|
||||||
|
|
||||||
|
certificate: |
|
||||||
|
LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tDQpNSUlDdXpDQ0FpU2dBd0lCQWdJRVhMU1BGVEFOQmdrcWhraUc5dzBCQVFVRkFE
|
||||||
|
QmxNUXN3Q1FZRFZRUUdFd0pIDQpRakVWTUJNR0ExVUVCd3dNUkdWbVlYVnNkQ0JEYVhSNU1SSXdFQVlEVlFRS0RBbERhR0Z1Ym1W
|
||||||
|
c0lEVXhEekFODQpCZ05WQkFzTUJrTmhjM05wWlRFYU1CZ0dBMVVFQXd3UlEyRnpjMmxsSUUxbFpHbGhJRUYxZEdnd0hoY05NVGt3
|
||||||
|
DQpOREUxTVRRd016QXhXaGNOTWprd05ERTFNVFF3TXpBeFdqQ0JqakVMTUFrR0ExVUVCaE1DUjBJeEVqQVFCZ05WDQpCQW9NQ1VO
|
||||||
|
b1lXNXVaV3dnTlRFWE1CVUdBMVVFQ3d3T1EyRnpjMmxsSUdOc2FXVnVkSE14VWpCUUJnTlZCQU1NDQpTVU5oYzNOcFpTQlRaV3ht
|
||||||
|
TFhOcFoyNWxaQ0JEWlhKMGFXWnBZMkYwWlNCbWIzSWdUWGsxSUVGdVpISnZhV1FnDQpUbVY0ZENCSFpXNGdZMnhwWlc1MElERTFO
|
||||||
|
VFV6TXpZNU9ERXdnWjh3RFFZSktvWklodmNOQVFFQkJRQURnWTBBDQpNSUdKQW9HQkFNbVVTSHFCZ3pwbThXelVHZ2VDSWZvSTI3
|
||||||
|
QlovQmNmWktpbnl5dXFNVlpDNXRLaUtaRWpydFV4DQpoMXFVcDJSSkN3Ui9RcENPQ2RQdFhzMENzekZvd1ByTlY4RHFtUXZqbzY5
|
||||||
|
dlhvTEM3c2RLUjQ1cEFUQU8vY3JLDQorTUFPUXo1VWEyQ1ZrYnY1SCtaMVhWWndqbm1qNGJHZEJHM005b0NzQlVqTEh0bm1nQSty
|
||||||
|
QWdNQkFBR2pUakJNDQpNQjBHQTFVZERnUVdCQlNVVUhrY3JKNUVkVTVWM2ZJbXQra1ljdkdnZFRBTEJnTlZIUThFQkFNQ0E3Z3dD
|
||||||
|
UVlEDQpWUjBUQkFJd0FEQVRCZ05WSFNVRUREQUtCZ2dyQmdFRkJRY0RBakFOQmdrcWhraUc5dzBCQVFVRkFBT0JnUUFpDQpHNi84
|
||||||
|
OUFEaDhEOUs0OXZjeklMQ2pqbGh6bG5US09GM2l1Um0vSjZYaWtxY3RxSDF0a01na0FXcHAwQldBRm9IDQpJbU5WSEtKdTRnZXgy
|
||||||
|
cEtLejNqOVlRNG5EWENQVTdVb0N2aDl5TTNYT0RITWZRT01sZkRtMU9GZkh2QkJvSHNVDQpHSE9EQTkwQi8xcU0xSlFaZzBOVjZi
|
||||||
|
UllrUytCOWdtSFI4dXhtZktrL0E9PQ0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQ0KLS0tLS1CRUdJTiBQUklWQVRFIEtFWS0t
|
||||||
|
LS0tDQpNSUlDZHdJQkFEQU5CZ2txaGtpRzl3MEJBUUVGQUFTQ0FtRXdnZ0pkQWdFQUFvR0JBTW1VU0hxQmd6cG04V3pVDQpHZ2VD
|
||||||
|
SWZvSTI3QlovQmNmWktpbnl5dXFNVlpDNXRLaUtaRWpydFV4aDFxVXAyUkpDd1IvUXBDT0NkUHRYczBDDQpzekZvd1ByTlY4RHFt
|
||||||
|
UXZqbzY5dlhvTEM3c2RLUjQ1cEFUQU8vY3JLK01BT1F6NVVhMkNWa2J2NUgrWjFYVlp3DQpqbm1qNGJHZEJHM005b0NzQlVqTEh0
|
||||||
|
bm1nQStyQWdNQkFBRUNnWUFjTVY4SnN6OTFWWnlDaWcreDZTTnpZdlhHDQo3bTd4bFBSeEdqYXlQclZ6eVJ1YmJnNitPKzFoNS9G
|
||||||
|
MFc4SWxwb21oOFdLUDhTMnl0RXBFQmhLbDRHN001WXdqDQp0SCtCVXFNMTNjbFdiQkxuQTZMT2RVeEVDTVhIUktjdHk5UE52UlJQ
|
||||||
|
cU9aV0YycDc5U1BFdFY5Q2o1SXNaVUdNDQpRcHYybk5oN1M2MUZGRVRuSVFKQkFPTXJNd2tnOGQzbksyS0lnVUNrcEtCRHlGTUJj
|
||||||
|
UXN0NG82VkxvVjNjenBwDQpxMW5FWGx4WnduMFh6Ni9GVjRWdTZYTjJLLzQxL2pCeWdTUlFXa05YVThNQ1FRRGpLYXVpdE1UajBM
|
||||||
|
ajU3QkJ3DQppNkNON0VFeUJSSkZaVGRSMDM4ZzkxSEFoUkVXVWpuQ0Vrc1UwcTl4TUNOdnM3OFN4RmQ1ODg5RUJQTnd1RDdvDQor
|
||||||
|
NTM1QWtFQTNwVTNYbHh2WUhQZktKNkR0cWtidlFSdFJoZUZnZVNsdGZzcUtCQVFVVTIwWFRKeEdwL0FWdjE3DQp1OGZxcDQwekpM
|
||||||
|
VEhDa0F4SFpzME9qYVpHcDU0TFFKQWJtM01iUjA1ZFpINnlpdlMxaE5hYW9QR01iMjdZeGJRDQpMS3dHNmd5d3BrbEp4RE1XdHR4
|
||||||
|
VHVYeXVJdlVHMVA5cFRJTThEeUhSeVR3cTU4bjVjeU1XYVFKQkFMVFRwZkVtDQoxdWhCeUd0NEtab3dYM2dhREpVZGU0ZjBwN3Ry
|
||||||
|
RFZGcExDNVJYcVVBQXNBQ2pzTHNYaEFadlovUEEwUDBiU2hmDQp4cUFRa2lnYmNKRXdxdjQ9DQotLS0tLUVORCBQUklWQVRFIEtF
|
||||||
|
WS0tLS0t
|
||||||
10
services/NF/MSL/MSLKeys.py
Normal file
10
services/NF/MSL/MSLKeys.py
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
from .MSLObject import MSLObject
|
||||||
|
|
||||||
|
|
||||||
|
class MSLKeys(MSLObject):
    """Container for the crypto material produced by an MSL handshake."""

    def __init__(self, encryption=None, sign=None, rsa=None, mastertoken=None, cdm_session=None):
        # All fields default to None so an empty key store can be created
        # before the handshake completes and filled in as keys arrive.
        self.encryption = encryption  # AES encryption key (bytes)
        self.sign = sign  # HMAC signing key (bytes)
        # RSA key pair used by the asymmetric-wrapped key exchange; may be
        # stored as exported PEM when cached (see MSL.cache_keys)
        self.rsa = rsa
        self.mastertoken = mastertoken  # master token dict from the key response
        self.cdm_session = cdm_session  # CDM session id for the Widevine exchange
|
||||||
6
services/NF/MSL/MSLObject.py
Normal file
6
services/NF/MSL/MSLObject.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
import jsonpickle
|
||||||
|
|
||||||
|
|
||||||
|
class MSLObject:
    """Base class for MSL message objects; renders instances as JSON for debugging."""

    def __repr__(self):
        # Serialize the instance's attributes to plain JSON so log output
        # shows the full message structure.
        encoded = jsonpickle.encode(self, unpicklable=False)
        return "<{} {}>".format(self.__class__.__name__, encoded)
|
||||||
450
services/NF/MSL/__init__.py
Normal file
450
services/NF/MSL/__init__.py
Normal file
@ -0,0 +1,450 @@
|
|||||||
|
import base64
|
||||||
|
import gzip
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import zlib
|
||||||
|
from datetime import datetime
|
||||||
|
from io import BytesIO
|
||||||
|
import ssl
|
||||||
|
import traceback
|
||||||
|
import jsonpickle
|
||||||
|
import requests
|
||||||
|
from Cryptodome.Cipher import AES, PKCS1_OAEP
|
||||||
|
from Cryptodome.Hash import HMAC, SHA256
|
||||||
|
from Cryptodome.PublicKey import RSA
|
||||||
|
from Cryptodome.Random import get_random_bytes
|
||||||
|
from Cryptodome.Util import Padding
|
||||||
|
|
||||||
|
from .MSLKeys import MSLKeys
|
||||||
|
from .schemes import EntityAuthenticationSchemes # noqa: F401
|
||||||
|
from .schemes import KeyExchangeSchemes
|
||||||
|
from .schemes.EntityAuthentication import EntityAuthentication
|
||||||
|
from .schemes.KeyExchangeRequest import KeyExchangeRequest
|
||||||
|
from pywidevine import RemoteCdm
|
||||||
|
from requests.adapters import HTTPAdapter
|
||||||
|
from requests.packages.urllib3.poolmanager import PoolManager
|
||||||
|
from requests.packages.urllib3.util import ssl_
|
||||||
|
|
||||||
|
|
||||||
|
class MSL:
    """
    Client for Netflix's MessageSecurityLayer (MSL) protocol.

    Performs the key-exchange handshake, then encrypts, signs, sends and
    parses/decrypts MSL messages. See https://github.com/Netflix/msl/wiki
    """

    log = logging.getLogger("MSL")

    def __init__(self, session, endpoint, sender, keys, message_id, user_auth=None):
        # Fixed TLS cipher list pinned for the MSL endpoint.
        CIPHERS = "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:AES256-SHA"

        class TlsAdapter(HTTPAdapter):
            # Transport adapter that forces the pinned cipher list, disables
            # TLS 1.0/1.1 (via ssl_options), and relaxes certificate and
            # hostname verification.
            def __init__(self, ssl_options=0, **kwargs):
                self.ssl_options = ssl_options
                super(TlsAdapter, self).__init__(**kwargs)

            def init_poolmanager(self, *pool_args, **pool_kwargs):
                ctx = ssl_.create_urllib3_context(
                    ciphers=CIPHERS,
                    cert_reqs=ssl.CERT_NONE,  # Changed from CERT_REQUIRED
                    options=self.ssl_options,
                )
                ctx.check_hostname = False  # Disable hostname checking
                # Load system CA certificates
                ctx.load_default_certs()
                self.poolmanager = PoolManager(
                    *pool_args,
                    ssl_context=ctx,
                    assert_hostname=False,  # Disable hostname verification
                    **pool_kwargs,
                )

        # NOTE(review): the `session` parameter is immediately shadowed by a
        # fresh requests session mounted with the TLS adapter — the passed-in
        # session object is never used here; confirm this is intentional.
        session = requests.session()
        adapter = TlsAdapter(ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
        session.mount("https://", adapter)
        self.session = session
        self.endpoint = endpoint
        self.sender = sender  # ESN identifying this client
        self.keys = keys  # MSLKeys negotiated by handshake()
        self.user_auth = user_auth
        self.message_id = message_id

    @classmethod
    def handshake(cls, scheme, session, endpoint, sender, cdm=None, msl_keys_path=None):
        """
        Perform (or restore from cache) the MSL key-exchange handshake and
        return a ready-to-use MSL client instance.

        :param scheme: KeyExchangeSchemes member selecting the exchange type.
        :param session: requests session used to POST the handshake message.
        :param endpoint: MSL endpoint URL.
        :param sender: ESN identifying this client.
        :param cdm: CDM used for the Widevine exchange (required on cache miss).
        :param msl_keys_path: cache object holding previously negotiated keys.
        """
        message_id = random.randint(0, pow(2, 52))
        msl_keys = MSL.load_cache_data(msl_keys_path)
        if msl_keys is not None:
            cls.log.info("Using cached MSL data")
        else:
            msl_keys = MSLKeys()
            # The asymmetric-wrapped exchange needs a locally generated RSA pair.
            if scheme != KeyExchangeSchemes.Widevine:
                msl_keys.rsa = RSA.generate(2048)

            if not cdm:
                cls.log.error("- No cached data and no CDM specified")
                sys.exit(1)

            # if not msl_keys_path:
            #     cls.log.error("- No cached data and no MSL key path specified")
            #     sys.exit(1)

            # Build the key request data for the chosen exchange scheme.
            if scheme == KeyExchangeSchemes.Widevine:
                msl_keys.cdm_session = cdm.open(pssh=b"\x0a\x7a\x00\x6c\x38\x2b", raw=True, offline=True)
                keyrequestdata = KeyExchangeRequest.Widevine(keyrequest=cdm.get_license_challenge(msl_keys.cdm_session))
            else:
                keyrequestdata = KeyExchangeRequest.AsymmetricWrapped(
                    keypairid="superKeyPair",
                    mechanism="JWK_RSA",
                    publickey=msl_keys.rsa.publickey().exportKey(format="DER"),
                )

            # Handshake message: unauthenticated entity auth + header with the
            # key request, followed by one empty end-of-message payload chunk.
            data = jsonpickle.encode(
                {
                    "entityauthdata": EntityAuthentication.Unauthenticated(sender),
                    "headerdata": base64.b64encode(
                        MSL.generate_msg_header(
                            message_id=message_id, sender=sender, is_handshake=True, keyrequestdata=keyrequestdata
                        ).encode("utf-8")
                    ).decode("utf-8"),
                    "signature": "",
                },
                unpicklable=False,
            )
            data += json.dumps(
                {
                    "payload": base64.b64encode(
                        json.dumps({"messageid": message_id, "data": "", "sequencenumber": 1, "endofmsg": True}).encode(
                            "utf-8"
                        )
                    ).decode("utf-8"),
                    "signature": "",
                }
            )

            try:
                r = session.post(url=endpoint, data=data)
            except requests.HTTPError as e:
                cls.log.error(f"- Key exchange failed, response data is unexpected: {e.response.text}")
                sys.exit(1)

            key_exchange = r.json()  # expecting no payloads, so this is fine
            if "errordata" in key_exchange:
                cls.log.error(
                    "- Key exchange failed: "
                    + json.loads(base64.b64decode(key_exchange["errordata"]).decode())["errormsg"]
                )
                sys.exit(1)

            # parse the crypto keys
            key_response_data = json.JSONDecoder().decode(base64.b64decode(key_exchange["headerdata"]).decode("utf-8"))[
                "keyresponsedata"
            ]

            if key_response_data["scheme"] != str(scheme):
                cls.log.error("- Key exchange scheme mismatch occurred")
                sys.exit(1)

            key_data = key_response_data["keydata"]
            if scheme == KeyExchangeSchemes.Widevine:
                # The CDM-wrapped session keys must be unwrapped; a RemoteCdm
                # exchanges them server-side, a local CDM extracts them from
                # the parsed license's OPERATOR_SESSION keys.
                if isinstance(cdm.device, RemoteCdm):
                    msl_keys.encryption, msl_keys.sign = cdm.device.exchange(
                        cdm.sessions[msl_keys.cdm_session],
                        license_res=key_data["cdmkeyresponse"],
                        enc_key_id=base64.b64decode(key_data["encryptionkeyid"]),
                        hmac_key_id=base64.b64decode(key_data["hmackeyid"]),
                    )
                    cdm.parse_license(msl_keys.cdm_session, key_data["cdmkeyresponse"])
                else:
                    cdm.parse_license(msl_keys.cdm_session, key_data["cdmkeyresponse"])
                    keys = cdm.get_keys(msl_keys.cdm_session)
                    msl_keys.encryption = MSL.get_widevine_key(
                        kid=base64.b64decode(key_data["encryptionkeyid"]),
                        keys=keys,
                        permissions=["AllowEncrypt", "AllowDecrypt"],
                    )
                    msl_keys.sign = MSL.get_widevine_key(
                        kid=base64.b64decode(key_data["hmackeyid"]),
                        keys=keys,
                        permissions=["AllowSign", "AllowSignatureVerify"],
                    )
            else:
                # Asymmetric-wrapped: session keys are RSA-OAEP-encrypted JWKs.
                cipher_rsa = PKCS1_OAEP.new(msl_keys.rsa)
                msl_keys.encryption = MSL.base64key_decode(
                    json.JSONDecoder().decode(
                        cipher_rsa.decrypt(base64.b64decode(key_data["encryptionkey"])).decode("utf-8")
                    )["k"]
                )
                msl_keys.sign = MSL.base64key_decode(
                    json.JSONDecoder().decode(
                        cipher_rsa.decrypt(base64.b64decode(key_data["hmackey"])).decode("utf-8")
                    )["k"]
                )
            msl_keys.mastertoken = key_response_data["mastertoken"]

            MSL.cache_keys(msl_keys, msl_keys_path)
        cls.log.info("MSL handshake successful")
        return cls(session=session, endpoint=endpoint, sender=sender, keys=msl_keys, message_id=message_id)

    @staticmethod
    def load_cache_data(msl_keys_path=None):
        """
        Load cached MSL keys, returning None when there is no cache or the
        cached master token is expired (or within ~10 hours of expiry).
        """
        if not msl_keys_path:
            return None
        # with open(msl_keys_path, encoding="utf-8") as fd:
        #     msl_keys = jsonpickle.decode(fd.read())
        msl_keys = msl_keys_path.data
        if msl_keys.rsa:
            # noinspection PyTypeChecker
            # expects RsaKey, but is a string, this is because jsonpickle can't pickle RsaKey object
            # so as a workaround it exports to PEM, and then when reading, it imports that PEM back
            # to an RsaKey :)
            msl_keys.rsa = RSA.importKey(msl_keys.rsa)
        # If it's expired or close to, return None as it's unusable
        if (
            msl_keys.mastertoken
            and (
                (
                    datetime.utcfromtimestamp(
                        int(
                            json.JSONDecoder().decode(
                                base64.b64decode(msl_keys.mastertoken["tokendata"]).decode("utf-8")
                            )["expiration"]
                        )
                    )
                    - datetime.now()
                ).total_seconds()
                / 60
                / 60
            )
            < 10
        ):
            return None
        return msl_keys

    @staticmethod
    def cache_keys(msl_keys, msl_keys_path):
        """Store negotiated MSL keys into the cache object."""
        # os.makedirs(os.path.dirname(msl_keys_path), exist_ok=True)
        if msl_keys.rsa:
            # jsonpickle can't pickle RsaKey objects :(
            msl_keys.rsa = msl_keys.rsa.export_key()
        msl_keys_path.set(msl_keys)
        # with open(msl_keys_path, "w", encoding="utf-8") as fd:
        #     fd.write(jsonpickle.encode(msl_keys))
        if msl_keys.rsa:
            # re-import now
            msl_keys.rsa = RSA.importKey(msl_keys.rsa)

    @staticmethod
    def generate_msg_header(
        message_id, sender, is_handshake, userauthdata=None, keyrequestdata=None, compression="GZIP"
    ):
        """
        The MSL header carries all MSL data used for entity and user authentication, message encryption
        and verification, and service tokens. Portions of the MSL header are encrypted.
        https://github.com/Netflix/msl/wiki/Messages#header-data

        :param message_id: number against which payload chunks are bound to protect against replay.
        :param sender: ESN
        :param is_handshake: This flag is set true if the message is a handshake message and will not include any
            payload chunks. It will include keyrequestdata.
        :param userauthdata: UserAuthData
        :param keyrequestdata: KeyRequestData
        :param compression: Supported compression algorithms.

        :return: The base64 encoded JSON String of the header
        """
        header_data = {
            "messageid": message_id,
            "renewable": True,  # MUST be True if is_handshake
            "handshake": is_handshake,
            "capabilities": {
                "compressionalgos": [compression] if compression else [],
                "languages": ["en-US"],  # bcp-47
                "encoderformats": ["JSON"],
            },
            "timestamp": int(time.time()),
            # undocumented or unused:
            "sender": sender,
            "nonreplayable": False,
            "recipient": "Netflix",
        }
        if userauthdata:
            header_data["userauthdata"] = userauthdata
        if keyrequestdata:
            header_data["keyrequestdata"] = [keyrequestdata]
        return jsonpickle.encode(header_data, unpicklable=False)

    @classmethod
    def get_widevine_key(cls, kid, keys, permissions):
        """
        Pick the OPERATOR_SESSION key matching *kid* that has all requested
        *permissions*; returns the raw key bytes or None if no key matches.
        """
        for key in keys:
            if key.kid != kid:
                continue
            if key.type != "OPERATOR_SESSION":
                cls.log.warning(f"Widevine Key Exchange: Wrong key type (not operator session) key {key}")
                continue
            if not set(permissions) <= set(key.permissions):
                cls.log.warning(f"Widevine Key Exchange: Incorrect permissions, key {key}, needed perms {permissions}")
                continue
            return key.key
        return None

    def send_message(self, endpoint, params, application_data, userauthdata=None):
        """
        Encrypt, sign and POST *application_data* as an MSL message; returns
        the parsed (header, payload_data) of the response. Exits on an MSL
        error response.
        """
        message = self.create_message(application_data, userauthdata)
        res = self.session.post(url=endpoint, data=message, params=params)
        header, payload_data = self.parse_message(res.text)
        if "errordata" in header:
            self.log.error(
                "- MSL response message contains an error: {}".format(
                    json.loads(base64.b64decode(header["errordata"].encode("utf-8")).decode("utf-8"))
                )
            )
            sys.exit(1)
        return header, payload_data

    def create_message(self, application_data, userauthdata=None):
        """
        Build a complete serialized MSL message: an encrypted+signed header
        followed by one encrypted+signed, gzip-compressed payload chunk.
        """
        self.message_id += 1  # new message must use a new message id

        headerdata = self.encrypt(
            self.generate_msg_header(
                message_id=self.message_id, sender=self.sender, is_handshake=False, userauthdata=userauthdata
            )
        )

        header = json.dumps(
            {
                "headerdata": base64.b64encode(headerdata.encode("utf-8")).decode("utf-8"),
                "signature": self.sign(headerdata).decode("utf-8"),
                "mastertoken": self.keys.mastertoken,
            }
        )

        payload_chunks = [
            self.encrypt(
                json.dumps(
                    {
                        "messageid": self.message_id,
                        "data": self.gzip_compress(json.dumps(application_data).encode("utf-8")).decode("utf-8"),
                        "compressionalgo": "GZIP",
                        "sequencenumber": 1,  # todo ; use sequence_number from master token instead?
                        "endofmsg": True,
                    }
                )
            )
        ]

        # The wire format is the header JSON immediately followed by each
        # payload-chunk JSON (no separators).
        message = header
        for payload_chunk in payload_chunks:
            message += json.dumps(
                {
                    "payload": base64.b64encode(payload_chunk.encode("utf-8")).decode("utf-8"),
                    "signature": self.sign(payload_chunk).decode("utf-8"),
                }
            )

        return message

    def decrypt_payload_chunks(self, payload_chunks):
        """
        Decrypt and extract data from payload chunks

        :param payload_chunks: List of payload chunks
        :return: json object
        """
        raw_data = ""

        for payload_chunk in payload_chunks:
            # todo ; verify signature of payload_chunk["signature"] against payload_chunk["payload"]
            # expecting base64-encoded json string
            payload_chunk = json.loads(base64.b64decode(payload_chunk["payload"]).decode("utf-8"))
            # decrypt the payload
            payload_decrypted = AES.new(
                key=self.keys.encryption, mode=AES.MODE_CBC, iv=base64.b64decode(payload_chunk["iv"])
            ).decrypt(base64.b64decode(payload_chunk["ciphertext"]))
            payload_decrypted = Padding.unpad(payload_decrypted, 16)
            payload_decrypted = json.loads(payload_decrypted.decode("utf-8"))
            # decode and uncompress data if compressed
            payload_data = base64.b64decode(payload_decrypted["data"])
            if payload_decrypted.get("compressionalgo") == "GZIP":
                payload_data = zlib.decompress(payload_data, 16 + zlib.MAX_WBITS)
            raw_data += payload_data.decode("utf-8")

        data = json.loads(raw_data)
        if "error" in data:
            error = data["error"]
            error_display = error.get("display")
            # strip internal error codes like "(E3-...)" from the detail text
            error_detail = re.sub(r" \(E3-[^)]+\)", "", error.get("detail", ""))

            # if error_display:
            #     self.log.critical(f"- {error_display}")
            # if error_detail:
            #     self.log.critical(f"- {error_detail}")

            if not (error_display or error_detail):
                self.log.info(f"- {error}")

            sys.exit(1)

        return data["result"]

    def parse_message(self, message):
        """
        Parse an MSL message into a header and list of payload chunks

        :param message: MSL message
        :returns: a 2-item tuple containing message and list of payload chunks if available
        """
        # An MSL message is concatenated JSON objects; insert commas and wrap
        # in brackets so it parses as one JSON array.
        parsed_message = json.loads("[{}]".format(message.replace("}{", "},{")))

        header = parsed_message[0]
        encrypted_payload_chunks = parsed_message[1:] if len(parsed_message) > 1 else []
        if encrypted_payload_chunks:
            payload_chunks = self.decrypt_payload_chunks(encrypted_payload_chunks)
        else:
            payload_chunks = {}

        return header, payload_chunks

    @staticmethod
    def gzip_compress(data):
        # gzip-compress *data* and return it base64-encoded (bytes).
        out = BytesIO()
        with gzip.GzipFile(fileobj=out, mode="w") as fd:
            fd.write(data)
        return base64.b64encode(out.getvalue())

    @staticmethod
    def base64key_decode(payload):
        # Decode a URL-safe base64 string that may be missing its '=' padding.
        length = len(payload) % 4
        if length == 2:
            payload += "=="
        elif length == 3:
            payload += "="
        elif length != 0:
            raise ValueError("Invalid base64 string")
        return base64.urlsafe_b64decode(payload.encode("utf-8"))

    def encrypt(self, plaintext):
        """
        Encrypt the given Plaintext with the encryption key
        :param plaintext:
        :return: Serialized JSON String of the encryption Envelope
        """
        iv = get_random_bytes(16)
        return json.dumps(
            {
                "ciphertext": base64.b64encode(
                    AES.new(self.keys.encryption, AES.MODE_CBC, iv).encrypt(Padding.pad(plaintext.encode("utf-8"), 16))
                ).decode("utf-8"),
                # key id is "<ESN>_<mastertoken sequence number>"
                "keyid": "{}_{}".format(
                    self.sender,
                    json.loads(base64.b64decode(self.keys.mastertoken["tokendata"]).decode("utf-8"))["sequencenumber"],
                ),
                "sha256": "AA==",
                "iv": base64.b64encode(iv).decode("utf-8"),
            }
        )

    def sign(self, text):
        """
        Calculates the HMAC signature for the given text with the current sign key and SHA256
        :param text:
        :return: Base64 encoded signature
        """
        return base64.b64encode(HMAC.new(self.keys.sign, text.encode("utf-8"), SHA256).digest())
|
||||||
59
services/NF/MSL/schemes/EntityAuthentication.py
Normal file
59
services/NF/MSL/schemes/EntityAuthentication.py
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
from . import EntityAuthenticationSchemes
|
||||||
|
from ..MSLObject import MSLObject
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection PyPep8Naming
class EntityAuthentication(MSLObject):
    """
    Data used to identify and authenticate the entity associated with a message.
    https://github.com/Netflix/msl/wiki/Entity-Authentication-%28Configuration%29
    """

    def __init__(self, scheme, authdata):
        """
        :param scheme: Entity Authentication Scheme identifier
        :param authdata: Entity Authentication data
        """
        self.scheme = str(scheme)
        self.authdata = authdata

    @classmethod
    def Unauthenticated(cls, identity):
        """
        Identify the entity without providing encryption or authentication.

        Entity identities can therefore be harvested and spoofed, but the
        entity keeps control over its own identity — useful when the identity
        is derived from or related to other data, or must be retained across
        state resets and MSL errors requiring entity re-authentication.
        """
        return cls(scheme=EntityAuthenticationSchemes.Unauthenticated, authdata={"identity": identity})

    @classmethod
    def Widevine(cls, devtype, keyrequest):
        """
        Identify a device with a Widevine CDM.

        Provides no encryption or authentication by itself (identities can be
        harvested and spoofed); the identity is composed from the claimed
        device type and the Widevine key request data, from which the CDM
        properties can be extracted. When coupled with the Widevine key
        exchange scheme, the identity can be cryptographically validated by
        comparing both key request payloads. Note that the local entity will
        not know its own entity identity under this scheme.

        :param devtype: Local entity device type (arbitrary value; the key
            request data may optionally be used to validate it)
        :param keyrequest: Widevine key request
        """
        return cls(scheme=EntityAuthenticationSchemes.Widevine, authdata={"devtype": devtype, "keyrequest": keyrequest})
|
||||||
80
services/NF/MSL/schemes/KeyExchangeRequest.py
Normal file
80
services/NF/MSL/schemes/KeyExchangeRequest.py
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
import base64
|
||||||
|
|
||||||
|
from . import KeyExchangeSchemes
|
||||||
|
from ..MSLObject import MSLObject
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection PyPep8Naming
class KeyExchangeRequest(MSLObject):
    """
    Session key exchange data from a requesting entity.
    https://github.com/Netflix/msl/wiki/Key-Exchange-%28Configuration%29
    """

    def __init__(self, scheme, keydata):
        """
        :param scheme: Key Exchange Scheme identifier
        :param keydata: Key Request data
        """
        self.scheme = str(scheme)
        self.keydata = keydata

    @classmethod
    def AsymmetricWrapped(cls, keypairid, mechanism, publickey):
        """
        Build a key exchange request from a generated ephemeral asymmetric key pair.

        Typically used when there is no other data or keys from which to base secure
        key exchange. Provides perfect forward secrecy, but cannot guarantee session
        keys are only available to the requesting entity if the requesting MSL stack
        performs the operation on behalf of a third party.

        Supported mechanisms and associated public key formats:
            RSA        SPKI  RSA-OAEP encrypt/decrypt
            ECC        SPKI  ECIES encrypt/decrypt
            JWEJS_RSA  SPKI  RSA-OAEP JSON Web Encryption JSON Serialization
            JWE_RSA    SPKI  RSA-OAEP JSON Web Encryption Compact Serialization
            JWK_RSA    SPKI  RSA-OAEP JSON Web Key
            JWK_RSAES  SPKI  RSA PKCS#1 JSON Web Key

        :param keypairid: key pair ID (included as a sanity check)
        :param mechanism: asymmetric key type
        :param publickey: public key bytes; Base64-encoded for the wire format
        """
        keydata = {
            "keypairid": keypairid,
            "mechanism": mechanism,
            "publickey": base64.b64encode(publickey).decode("utf-8"),
        }
        return cls(KeyExchangeSchemes.AsymmetricWrapped, keydata)

    @classmethod
    def Widevine(cls, keyrequest):
        """
        Build a key exchange request backed by the Widevine CDM.

        Widevine issues a one-time-use key request which the Widevine server library
        authenticates before returning randomly generated symmetric keys in a
        protected response bound to the request and client library. The request also
        carries a model identifier and a unique, long-lived device identifier, both
        retrievable by the server library. The client library protects the returned
        keys from inspection or misuse.

        :param keyrequest: Widevine CDM license challenge; raw bytes are
            Base64-encoded here, strings are assumed to be Base64 already.
        """
        if not isinstance(keyrequest, str):
            keyrequest = base64.b64encode(keyrequest).decode()
        return cls(KeyExchangeSchemes.Widevine, {"keyrequest": keyrequest})
|
||||||
59
services/NF/MSL/schemes/UserAuthentication.py
Normal file
59
services/NF/MSL/schemes/UserAuthentication.py
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
from ..MSLObject import MSLObject
|
||||||
|
from . import UserAuthenticationSchemes
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection PyPep8Naming
class UserAuthentication(MSLObject):
    """
    Data used to identify and authenticate the user associated with a message.
    https://github.com/Netflix/msl/wiki/User-Authentication-%28Configuration%29
    """

    def __init__(self, scheme, authdata):
        """
        :param scheme: User Authentication Scheme identifier
        :param authdata: User Authentication data
        """
        self.scheme = str(scheme)
        self.authdata = authdata

    @classmethod
    def EmailPassword(cls, email, password):
        """
        Authenticate with a standard email address and password pair.

        :param email: user email address
        :param password: user password
        """
        authdata = {"email": email, "password": password}
        return cls(UserAuthenticationSchemes.EmailPassword, authdata)

    @classmethod
    def NetflixIDCookies(cls, netflixid, securenetflixid):
        """
        Authenticate with the Netflix ID HTTP cookies issued after web-site login;
        possession of the cookies serves as proof of user identity.

        The Netflix ID cookie is encrypted, identifies the subscriber, and is
        analogous to a username; the Secure Netflix ID cookie is tied to it, only
        sent over HTTPS, and analogous to a password. If either cookie is absent
        from this authdata, the HTTP cookie headers are queried for it instead —
        acceptable only when HTTPS is the underlying transport.

        :param netflixid: Netflix ID cookie
        :param securenetflixid: Secure Netflix ID cookie
        """
        authdata = {"netflixid": netflixid, "securenetflixid": securenetflixid}
        return cls(UserAuthenticationSchemes.NetflixIDCookies, authdata)
|
||||||
24
services/NF/MSL/schemes/__init__.py
Normal file
24
services/NF/MSL/schemes/__init__.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class Scheme(Enum):
    """Base for MSL scheme identifiers; stringifies to the raw scheme value."""

    def __str__(self):
        return f"{self.value}"


class EntityAuthenticationSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/Entity-Authentication-%28Configuration%29"""
    Unauthenticated = "NONE"
    Widevine = "WIDEVINE"


class UserAuthenticationSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/User-Authentication-%28Configuration%29"""
    EmailPassword = "EMAIL_PASSWORD"
    NetflixIDCookies = "NETFLIXID"


class KeyExchangeSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/Key-Exchange-%28Configuration%29"""
    AsymmetricWrapped = "ASYMMETRIC_WRAPPED"
    Widevine = "WIDEVINE"
|
||||||
781
services/NF/__init__.py
Normal file
781
services/NF/__init__.py
Normal file
@ -0,0 +1,781 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from datetime import timedelta
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
from langcodes import Language
|
||||||
|
from pymp4.parser import Box
|
||||||
|
from pywidevine.cdm import Cdm
|
||||||
|
from pywidevine.device import DeviceTypes
|
||||||
|
from pywidevine.pssh import PSSH
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.drm import Widevine
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T
|
||||||
|
from devine.core.tracks import Audio, Chapter, Chapters, Subtitle, Track, Tracks, Video
|
||||||
|
from devine.core.utils.collections import as_list, flatten
|
||||||
|
|
||||||
|
from .MSL import MSL
|
||||||
|
from .MSL.schemes import KeyExchangeSchemes
|
||||||
|
from .MSL.schemes.UserAuthentication import UserAuthentication
|
||||||
|
|
||||||
|
|
||||||
|
class NF(Service):
|
||||||
|
"""
|
||||||
|
Service code for the Netflix streaming service (https://netflix.com).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Cookies
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L1: 2160p
|
||||||
|
L3 Chrome: 720p, 1080p
|
||||||
|
L3 Android: 540p
|
||||||
|
PlayReady:
|
||||||
|
SL3: 2160p
|
||||||
|
SL2: 1080p
|
||||||
|
|
||||||
|
*MPL: FHD with Android L3, sporadically available with ChromeCDM
|
||||||
|
HPL: 1080p with ChromeCDM, 720p/1080p with other L3 (varies per title)
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input can be either just title ID or URL:
|
||||||
|
devine dl -w s01e01 NF 80057281
|
||||||
|
devine dl -w s01e01 NF https://www.netflix.com/title/80057281
|
||||||
|
|
||||||
|
\b
|
||||||
|
Notes:
|
||||||
|
- Android CDM is currently not supported as the MSL Widevine KeyExchange is broken.
|
||||||
|
- The library of contents as well as regional availability is available at https://unogs.com
|
||||||
|
However, Do note that Netflix locked everyone out of being able to automate the available data
|
||||||
|
meaning the reliability and amount of information may be reduced.
|
||||||
|
- You could combine the information from https://unogs.com with https://justwatch.com for further data
|
||||||
|
|
||||||
|
TODO: Fix Widevine KeyExchange scheme
|
||||||
|
"""
|
||||||
|
|
||||||
|
    ALIASES = ("netflix",)
    # Accepted inputs: a bare title ID, a netflix.com title/watch/jbv URL
    # (optionally with a 2-char region path segment), or a unogs.com title URL.
    TITLE_RE = [
        r"^(?:https?://(?:www\.)?netflix\.com(?:/[a-z0-9]{2})?/(?:title/|watch/|.+jbv=))?(?P<id>\d+)",
        r"^https?://(?:www\.)?unogs\.com/title/(?P<id>\d+)",
    ]

    # Map bare Netflix language tags to the regional variants used downstream.
    NF_LANG_MAP = {
        "es": "es-419",
        "pt": "pt-PT",
    }
|
||||||
|
|
||||||
|
    @staticmethod
    @click.command(name="NF", short_help="https://netflix.com")
    @click.argument("title", type=str, required=False)
    @click.option(
        "-p",
        "--profile",
        type=click.Choice(["MPL", "HPL", "MPL+HPL"], case_sensitive=False),
        default="MPL+HPL",
        help="H.264 profile to use. Default is best available.",
    )
    @click.option("--meta-lang", type=str, help="Language to use for metadata")
    @click.pass_context
    def cli(ctx, **kwargs):
        # Click entry point: construct the service with the parsed CLI options.
        return NF(ctx, **kwargs)
|
||||||
|
|
||||||
|
    def __init__(self, ctx, title, profile, meta_lang):
        """
        :param ctx: Click context; the parent context carries the shared `dl` options.
        :param title: title ID or URL as given on the command line.
        :param profile: H.264 profile selection ("MPL", "HPL" or "MPL+HPL").
        :param meta_lang: language to request metadata in, if any.
        """
        super().__init__(ctx)
        self.parse_title(ctx, title)
        self.profile = profile
        self.meta_lang = meta_lang

        # A proxy value with exactly two non-digit characters is treated as a
        # country code (e.g. "us", "gb2") and used to geofence requests.
        if ctx.parent.params["proxy"] and len("".join(i for i in ctx.parent.params["proxy"] if not i.isdigit())) == 2:
            self.GEOFENCE.append(ctx.parent.params["proxy"])

        # Copy the relevant download options from the parent `dl` command.
        vcodec = ctx.parent.params.get("vcodec")
        self.vcodec = "H265" if vcodec and vcodec == Video.Codec.HEVC else "H264"
        self.acodec = ctx.parent.params["acodec"]
        self.range = ctx.parent.params["range_"][0].name
        self.quality = ctx.parent.params["quality"]
        self.audio_only = ctx.parent.params["audio_only"]
        self.subs_only = ctx.parent.params["subs_only"]
        self.chapters_only = ctx.parent.params["chapters_only"]
        # Manifest profile list; populated later by configure().
        self.profiles = []

        self.cdm = ctx.obj.cdm
        # Android CDMs are rejected up front: the MSL Widevine KeyExchange scheme
        # is broken (see the class-level notes / TODO).
        if self.cdm.device_type == DeviceTypes.ANDROID:
            self.log.error(
                " - Android CDMs are currently not supported as the Widevine KeyExchange scheme is broken.")
            sys.exit(1)

        # Netflix account (user) profile to act as; falls back to "default".
        self.user_profile = ctx.parent.params.get("profile")
        if not self.user_profile:
            self.user_profile = "default"
|
||||||
|
|
||||||
|
def authenticate(
|
||||||
|
self,
|
||||||
|
cookies: Optional[CookieJar] = None,
|
||||||
|
credential: Optional[Credential] = None,
|
||||||
|
) -> None:
|
||||||
|
super().authenticate(cookies, credential)
|
||||||
|
if not cookies:
|
||||||
|
raise EnvironmentError("Service requires Cookies for Authentication.")
|
||||||
|
|
||||||
|
self.session.cookies.update(cookies)
|
||||||
|
self.log.info(f" + User profile: '{self.user_profile}'")
|
||||||
|
self.configure()
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
|
||||||
|
metadata = self.get_metadata(self.title)["video"]
|
||||||
|
if metadata["type"] == "movie":
|
||||||
|
movie = [
|
||||||
|
Movie(
|
||||||
|
id_=self.title,
|
||||||
|
name=metadata["title"],
|
||||||
|
year=metadata["year"],
|
||||||
|
service=self.__class__,
|
||||||
|
data=metadata
|
||||||
|
)
|
||||||
|
]
|
||||||
|
return Movies(movie)
|
||||||
|
else:
|
||||||
|
episodes = [
|
||||||
|
episode
|
||||||
|
for season in [
|
||||||
|
[dict(x, **{"season": season["seq"]})
|
||||||
|
for x in season["episodes"]]
|
||||||
|
for season in metadata["seasons"]
|
||||||
|
]
|
||||||
|
for episode in season
|
||||||
|
]
|
||||||
|
titles = [
|
||||||
|
Episode(
|
||||||
|
id_=self.title,
|
||||||
|
title=metadata["title"],
|
||||||
|
year=metadata["seasons"][0].get("year"),
|
||||||
|
season=episode.get("season"),
|
||||||
|
number=episode.get("seq"),
|
||||||
|
name=episode.get("title"),
|
||||||
|
service=self.__class__,
|
||||||
|
data=episode,
|
||||||
|
)
|
||||||
|
for episode in episodes
|
||||||
|
]
|
||||||
|
|
||||||
|
return Series(titles)
|
||||||
|
|
||||||
|
# TODO: Get original language without making an extra manifest request
|
||||||
|
# manifest = self.get_manifest(titles[0], self.profiles)
|
||||||
|
# original_language = self.get_original_language(manifest)
|
||||||
|
|
||||||
|
# for title in titles:
|
||||||
|
# title.original_lang = original_language
|
||||||
|
|
||||||
|
    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """
        Build the track listing for a title, attaching each DRM track's license URL
        via track.data["license_url"] for later use by get_widevine_license().
        """
        if self.vcodec == "H264":
            # If H.264, get both MPL and HPL tracks as they alternate in terms of bitrate
            tracks = Tracks()

            # Synthesize a combined profile list, used when only audio/subs/chapters
            # are wanted and the specific video profile does not matter.
            self.config["profiles"]["video"]["H264"]["MPL+HPL+QC"] = (
                self.config["profiles"]["video"]["H264"]["MPL"]
                + self.config["profiles"]["video"]["H264"]["HPL"]
                + self.config["profiles"]["video"]["H264"]["QC"]
            )

            if self.audio_only or self.subs_only or self.chapters_only:
                profiles = ["MPL+HPL+QC"]
            else:
                profiles = self.profile.split("+")

            for profile in profiles:
                try:
                    manifest = self.get_manifest(title, self.config["profiles"]["video"]["H264"][profile])
                except Exception:
                    # Fall back to requesting MPL and HPL together if the
                    # single-profile manifest request fails.
                    manifest = self.get_manifest(
                        title,
                        self.config["profiles"]["video"]["H264"]["MPL"]
                        + self.config["profiles"]["video"]["H264"]["HPL"]
                    )
                manifest_tracks = self.manifest_as_tracks(manifest)
                license_url = manifest["links"]["license"]["href"]

                # Android L3 needs alternate license URLs for HD content.
                if self.cdm.security_level == 3 and self.cdm.device_type == DeviceTypes.ANDROID:
                    max_quality = max(x.height for x in manifest_tracks.videos)
                    if profile == "MPL" and max_quality >= 720:
                        # Try the SD (BPL) manifest's license URL instead —
                        # presumably the HD MPL license is restricted on Android L3.
                        manifest_sd = self.get_manifest(title, self.config["profiles"]["video"]["H264"]["BPL"])
                        license_url_sd = manifest_sd["links"]["license"]["href"]
                        if "SD_LADDER" in manifest_sd["video_tracks"][0]["streams"][0]["tags"]:
                            # SD manifest is new encode encrypted with different keys that won't work for HD
                            continue
                        license_url = license_url_sd
                    if profile == "HPL" and max_quality >= 1080:
                        if "SEGMENT_MAP_2KEY" in manifest["video_tracks"][0]["streams"][0]["tags"]:
                            # 1080p license restricted from Android L3, 720p license will work for 1080p
                            manifest_720 = self.get_manifest(
                                title, [x for x in self.config["profiles"]["video"]["H264"]["HPL"] if "l40" not in x]
                            )
                            license_url = manifest_720["links"]["license"]["href"]
                        else:
                            # Older encode, can't use 720p keys for 1080p
                            continue

                for track in manifest_tracks:
                    if track.drm:
                        track.data["license_url"] = license_url
                tracks.add(manifest_tracks, warn_only=True)
            return tracks
        else:
            # Non-H.264 codecs: one manifest with the profiles chosen in configure().
            manifest = self.get_manifest(title, self.profiles)
            manifest_tracks = self.manifest_as_tracks(manifest)
            license_url = manifest["links"]["license"]["href"]
            for track in manifest_tracks:
                if track.drm:
                    track.data["license_url"] = license_url
                # if isinstance(track, Video):
                #     # TODO: Needs something better than this
                #     track.hdr10 = track.codec.split("-")[1] == "hdr"  # hevc-hdr, vp9-hdr
                #     track.dv = track.codec.startswith("hevc-dv")
            return manifest_tracks
|
||||||
|
|
||||||
|
    def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
        """
        Build chapter markers from the title's skipMarkers/creditsOffset metadata.

        Returns an empty list when the episode has no skip markers or no credits
        offset.
        """
        def _convert(total_seconds):
            # Format seconds as HH:MM:SS; sub-second precision is dropped.
            hours = total_seconds // 3600
            minutes = (total_seconds % 3600) // 60
            seconds = total_seconds % 60
            # milliseconds = (total_seconds % 1) * 1000
            return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}"

        metadata = self.get_metadata(title.id)["video"]

        if metadata["type"] == "movie":
            episode = metadata
        else:
            # Locate this episode's metadata within its season.
            season = next(x for x in metadata["seasons"] if x["seq"] == title.season)
            episode = next(x for x in season["episodes"] if x["seq"] == title.number)

        if not (episode.get("skipMarkers") and episode.get("creditsOffset")):
            return []

        # Normalize skip markers to {start, end} in seconds; missing values become 0.
        chapters = {}
        for item in episode["skipMarkers"]:
            chapters[item] = {"start": 0, "end": 0}
            if not episode["skipMarkers"][item]:
                continue
            if episode["skipMarkers"][item]["start"] is None:
                chapters[item]["start"] = 0
            else:
                chapters[item]["start"] = episode["skipMarkers"][item]["start"] / 1000
            if episode["skipMarkers"][item]["end"] is None:
                chapters[item]["end"] = 0
            else:
                chapters[item]["end"] = episode["skipMarkers"][item]["end"] / 1000

        cc, intro = 1, 0
        chaps = [Chapter(timestamp="00:00:00.000")]

        for item in chapters:
            if chapters[item]["start"] != 0:
                # NOTE(review): `intro` is never updated, so this branch always runs
                # and every non-zero marker is labelled "Intro" — confirm whether
                # `intro` was meant to be flipped after the first labelled marker.
                if intro == 0:
                    cc += 1
                    chaps.append(Chapter(name="Intro", timestamp=_convert(chapters[item]["start"])))
                    cc += 1
                    chaps.append(Chapter(timestamp=_convert(chapters[item]["end"])))
                else:
                    cc += 1
                    chaps.append(Chapter(timestamp=_convert(chapters[item]["start"])))
                    cc += 1
                    chaps.append(Chapter(timestamp=_convert(chapters[item]["end"])))
                cc += 1

        # NOTE(review): both branches append an identical Credits chapter, so the
        # `cc` bookkeeping currently has no observable effect — confirm intent.
        if cc == 1:
            chaps.append(Chapter(name="Credits", timestamp=_convert(episode["creditsOffset"])))
        else:
            chaps.append(Chapter(name="Credits", timestamp=_convert(episode["creditsOffset"])))

        return chaps
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, challenge: bytes, **_: Any) -> str:
|
||||||
|
return self.config["certificate"]
|
||||||
|
|
||||||
|
    def get_widevine_license(self, *, challenge: bytes, session_id: bytes, title: Title_T, track) -> str:
        """
        Request a Widevine license for a track through the MSL license endpoint.

        :param challenge: raw Widevine license challenge from the CDM.
        :param session_id: CDM session ID the challenge belongs to.
        :param title: title being licensed (unused here; part of the service API).
        :param track: track being licensed; track.data must carry "license_url"
            (set in get_tracks()).
        :return: Base64-encoded Widevine license response.
        """
        if not self.msl:
            self.log.error(" - Cannot get license, MSL client has not been created yet.")
            sys.exit(1)
        header, payload_data = self.msl.send_message(
            endpoint=self.config["endpoints"]["licence"],
            params={},
            application_data={
                "version": 2,
                "url": track.data["license_url"],
                "id": int(time.time() * 10000),
                "esn": self.esn,
                "languages": ["en-US"],
                "uiVersion": self.react_context["serverDefs"]["data"]["uiVersion"],
                "clientVersion": "6.0026.291.011",
                "params": [
                    {
                        "sessionId": base64.b64encode(session_id).decode("utf-8"),
                        "clientTime": int(time.time()),
                        "challengeBase64": base64.b64encode(challenge).decode("utf-8"),
                        "xid": str(int((int(time.time()) + 0.1612) * 1000)),
                    }
                ],
                "echo": "sessionId",
            },
            userauthdata=self.userauthdata,
        )
        if not payload_data:
            self.log.error(f" - Failed to get license: {header['message']} [{header['code']}]")
            sys.exit(1)
        if "error" in payload_data[0]:
            error = payload_data[0]["error"]
            error_display = error.get("display")
            # Strip Netflix's internal error-code suffix (e.g. " (E3-...)") from the detail text.
            error_detail = re.sub(r" \(E3-[^)]+\)", "", error.get("detail", ""))

            if error_display:
                self.log.critical(f" - {error_display}")
            if error_detail:
                self.log.critical(f" - {error_detail}")

            if not (error_display or error_detail):
                # No friendly message available; log the raw error object.
                self.log.critical(f" - {error}")

            sys.exit(1)

        return payload_data[0]["licenseResponseBase64"]
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
    def configure(self):
        """
        Set up the MSL client: resolve the ESN for the active CDM, perform the MSL
        handshake for the matching key exchange scheme, prepare user authentication
        data, and load the reactContext web data used by later API calls.
        """
        self.session.headers.update({"Origin": "https://netflix.com"})
        self.profiles = self.get_profiles()
        self.esn = None
        self.msl = None
        self.userauthdata = None
        self.log.info("Initializing a Netflix MSL client")

        if self.cdm.device_type == DeviceTypes.CHROME:
            # Chrome ESNs can be generated locally.
            self.esn = self.chrome_esn_generator()
        else:
            # Other CDMs need a pre-configured ESN, mapped by Widevine system ID
            # (keys may be stored as int or str in the config).
            esn_map = self.config.get("esn_map", {})
            self.esn = esn_map.get(self.cdm.system_id) or esn_map.get(str(self.cdm.system_id))
            if not self.esn:
                self.log.error(" - No ESN specified")
                sys.exit(1)
        self.log.info(f" + ESN: {self.esn}")
        # Pick the key exchange scheme matching the CDM device type.
        scheme = {
            DeviceTypes.CHROME: KeyExchangeSchemes.AsymmetricWrapped,
            DeviceTypes.ANDROID: KeyExchangeSchemes.Widevine,
        }[self.cdm.device_type]
        self.log.info(f" + Scheme: {scheme}")
        self.msl = MSL.handshake(
            scheme=scheme,
            session=self.session,
            endpoint=self.config["endpoints"]["manifest"],
            sender=self.esn,
            cdm=self.cdm,
            # MSL keys are cached per CDM system ID / ESN / scheme / user profile.
            msl_keys_path=self.cache.get(
                "msl_{id}_{esn}_{scheme}_{profile}".format(
                    id=self.cdm.system_id,
                    esn=self.esn,
                    scheme=scheme,
                    profile=self.user_profile
                )
            ),
        )
        if not self.session.cookies:
            self.log.error(" - No cookies provided, cannot log in.")
            sys.exit(1)
        if self.cdm.device_type == DeviceTypes.CHROME:
            # Chrome path: authenticate the user with the web-login cookies.
            self.userauthdata = UserAuthentication.NetflixIDCookies(
                netflixid=self.session.cookies.get_dict()["NetflixId"],
                securenetflixid=self.session.cookies.get_dict()["SecureNetflixId"],
            )
        else:
            if not self.credentials:
                self.log.error(" - Credentials are required for Android CDMs, and none were provided.")
                sys.exit(1)
            # need to get cookies via an android-like way
            # outdated
            # self.android_login(credentials.username, credentials.password)
            # need to use EmailPassword for userauthdata, it specifically checks for this
            self.userauthdata = UserAuthentication.EmailPassword(
                email=self.credentials.username, password=self.credentials.password
            )
        self.react_context = self.get_react_context()
|
||||||
|
|
||||||
|
def get_profiles(self):
|
||||||
|
if self.range in ("HDR10", "DV") and self.vcodec not in ("H265", "VP9"):
|
||||||
|
self.vcodec = "H265"
|
||||||
|
profiles = self.config["profiles"]["video"][self.vcodec]
|
||||||
|
if self.range and self.range in profiles:
|
||||||
|
return profiles[self.range]
|
||||||
|
return profiles
|
||||||
|
|
||||||
|
    def get_react_context(self):
        """Netflix uses a "BUILD_IDENTIFIER" value on some API's, e.g. the Shakti (metadata) API.
        This value isn't given to the user through normal means so REGEX is needed.
        It's obtained by grabbing the body of a logged-in netflix homepage.
        The value changes often but doesn't often matter if it's only a bit out of date.

        It also uses a Client Version for various MPL calls.

        :returns: reactContext parsed json-loaded dictionary
        """
        # Cached per user profile; only scrape the homepage when the cache is empty.
        cached_context = self.cache.get(f"data_{self.user_profile}")

        if not cached_context:
            src = self.session.get("https://www.netflix.com/browse").text
            match = re.search(r"netflix\.reactContext = ({.+});</script><script>window\.", src, re.MULTILINE)
            if not match:
                self.log.error(" - Failed to retrieve reactContext data, cookies might be outdated.")
                sys.exit(1)
            react_context_raw = match.group(1)
            # The embedded JS uses \xNN escapes which JSON can't parse; rewrite them as \u00NN.
            react_context = json.loads(re.sub(r"\\x", r"\\u00", react_context_raw))["models"]
            # Convert camelCase request-header names to their Dashed-Header form.
            react_context["requestHeaders"]["data"] = {
                re.sub(r"\B([A-Z])", r"-\1", k): str(v) for k, v in react_context["requestHeaders"]["data"].items()
            }
            # Stringify all header values so they are safe to send as HTTP headers.
            react_context["abContext"]["data"]["headers"] = {
                k: str(v) for k, v in react_context["abContext"]["data"]["headers"].items()
            }
            react_context["requestHeaders"]["data"] = {
                k: str(v) for k, v in react_context["requestHeaders"]["data"].items()
            }
            # react_context["playerModel"]["data"]["config"]["core"]["initParams"]["clientVersion"] = (
            #     react_context["playerModel"]["data"]["config"]["core"]["assets"]["core"].split("-")[-1][:-3]
            # )
            cached_context.set(react_context)
            return cached_context.data

        return cached_context.data
|
||||||
|
|
||||||
|
def get_metadata(self, title_id):
|
||||||
|
"""
|
||||||
|
Obtain Metadata information about a title by it's ID.
|
||||||
|
:param title_id: Title's ID.
|
||||||
|
:returns: Title Metadata.
|
||||||
|
"""
|
||||||
|
|
||||||
|
"""
|
||||||
|
# Wip non-working code for the newer shakti metadata replacement
|
||||||
|
metadata = self.session.post(
|
||||||
|
url=self.config["endpoints"]["website"].format(
|
||||||
|
build_id=self.react_context["serverDefs"]["data"]["BUILD_IDENTIFIER"]
|
||||||
|
),
|
||||||
|
params={
|
||||||
|
# features
|
||||||
|
"webp": self.react_context["browserInfo"]["data"]["features"]["webp"],
|
||||||
|
"drmSystem": self.config["configuration"]["drm_system"],
|
||||||
|
# truths
|
||||||
|
"isVolatileBillboardsEnabled": self.react_context["truths"]["data"]["volatileBillboardsEnabled"],
|
||||||
|
"routeAPIRequestsThroughFTL": self.react_context["truths"]["data"]["routeAPIRequestsThroughFTL"],
|
||||||
|
"isTop10Supported": self.react_context["truths"]["data"]["isTop10Supported"],
|
||||||
|
"categoryCraversEnabled": self.react_context["truths"]["data"]["categoryCraversEnabled"],
|
||||||
|
"hasVideoMerchInBob": self.react_context["truths"]["data"]["hasVideoMerchInBob"],
|
||||||
|
"persoInfoDensity": self.react_context["truths"]["data"]["enablePersoInfoDensityToggle"],
|
||||||
|
"contextAwareImages": self.react_context["truths"]["data"]["contextAwareImages"],
|
||||||
|
# ?
|
||||||
|
"falcor_server": "0.1.0",
|
||||||
|
"withSize": True,
|
||||||
|
"materialize": True,
|
||||||
|
"original_path": quote_plus(
|
||||||
|
f"/shakti/{self.react_context['serverDefs']['data']['BUILD_IDENTIFIER']}/pathEvaluator"
|
||||||
|
)
|
||||||
|
},
|
||||||
|
headers=dict(
|
||||||
|
**self.react_context["abContext"]["data"]["headers"],
|
||||||
|
**{
|
||||||
|
"X-Netflix.Client.Request.Name": "ui/falcorUnclassified",
|
||||||
|
"X-Netflix.esn": self.react_context["esnGeneratorModel"]["data"]["esn"],
|
||||||
|
"x-netflix.nq.stack": self.react_context["serverDefs"]["data"]["stack"],
|
||||||
|
"x-netflix.request.client.user.guid": (
|
||||||
|
self.react_context["memberContext"]["data"]["userInfo"]["guid"]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
**self.react_context["requestHeaders"]["data"]
|
||||||
|
),
|
||||||
|
data={
|
||||||
|
"path": json.dumps([
|
||||||
|
[
|
||||||
|
"videos",
|
||||||
|
70155547,
|
||||||
|
[
|
||||||
|
"bobSupplementalMessage",
|
||||||
|
"bobSupplementalMessageIcon",
|
||||||
|
"bookmarkPosition",
|
||||||
|
"delivery",
|
||||||
|
"displayRuntime",
|
||||||
|
"evidence",
|
||||||
|
"hasSensitiveMetadata",
|
||||||
|
"interactiveBookmark",
|
||||||
|
"maturity",
|
||||||
|
"numSeasonsLabel",
|
||||||
|
"promoVideo",
|
||||||
|
"releaseYear",
|
||||||
|
"seasonCount",
|
||||||
|
"title",
|
||||||
|
"userRating",
|
||||||
|
"userRatingRequestId",
|
||||||
|
"watched"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"videos",
|
||||||
|
70155547,
|
||||||
|
"seasonList",
|
||||||
|
"current",
|
||||||
|
"summary"
|
||||||
|
]
|
||||||
|
]),
|
||||||
|
"authURL": self.react_context["memberContext"]["data"]["userInfo"]["authURL"]
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
print(metadata.headers)
|
||||||
|
print(metadata.text)
|
||||||
|
exit()
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
metadata = self.session.get(
|
||||||
|
self.config["endpoints"]["metadata"].format(
|
||||||
|
build_id=self.react_context["serverDefs"]["data"]["BUILD_IDENTIFIER"]
|
||||||
|
),
|
||||||
|
params={
|
||||||
|
"movieid": title_id,
|
||||||
|
"drmSystem": self.config["configuration"]["drm_system"],
|
||||||
|
"isWatchlistEnabled": False,
|
||||||
|
"isShortformEnabled": False,
|
||||||
|
"isVolatileBillboardsEnabled": self.react_context["truths"]["data"]["volatileBillboardsEnabled"],
|
||||||
|
"languages": self.meta_lang,
|
||||||
|
},
|
||||||
|
).json()
|
||||||
|
except requests.HTTPError as e:
|
||||||
|
if e.response.status_code == 500:
|
||||||
|
self.log.warning(
|
||||||
|
" - Recieved a HTTP 500 error while getting metadata, deleting cached reactContext data"
|
||||||
|
)
|
||||||
|
os.unlink(self.cache.get("web_data.json"))
|
||||||
|
return self.get_metadata(self, title_id)
|
||||||
|
raise
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
self.log.error(" - Failed to get metadata, title might not be available in your region.")
|
||||||
|
sys.exit(1)
|
||||||
|
else:
|
||||||
|
if "status" in metadata and metadata["status"] == "error":
|
||||||
|
self.log.error(f" - Failed to get metadata, cookies might be expired. ({metadata['message']})")
|
||||||
|
sys.exit(1)
|
||||||
|
return metadata
|
||||||
|
|
||||||
|
def get_manifest(self, title, video_profiles):
    """
    Request a playback manifest for a title over the MSL channel.

    :param title: Title object; `title.data` must hold "id" (and optionally "episodeId").
    :param video_profiles: video profile strings (or a dict of them) to request.
    :returns: the decrypted manifest payload chunks from the MSL response.
    :raises Exception: if the manifest response contains "errorDetails".
    """
    if isinstance(video_profiles, dict):
        video_profiles = list(video_profiles.values())
    if self.quality == 720:
        # NF only returns lower quality 720p streams if 1080p is also requested
        video_profiles = [x for x in video_profiles if "l40" not in x]
    audio_profiles = self.config["profiles"]["audio"]
    if self.acodec:
        # narrow to the user-requested audio codec's profile set
        audio_profiles = audio_profiles[self.acodec]
    if isinstance(audio_profiles, dict):
        audio_profiles = list(audio_profiles.values())
    profiles = sorted(set(flatten(as_list(
        # as list then flatten in case any of these profiles are a list of lists
        # list(set()) used to remove any potential duplicates
        self.config["profiles"]["video"]["H264"]["BPL"], # always required for some reason
        video_profiles,
        audio_profiles,
        self.config["profiles"]["subtitles"],
    ))))
    self.log.debug("Profiles:\n\t" + "\n\t".join(profiles))

    # Chrome-type CDMs send extra query parameters identifying the web player build.
    params = {}
    if self.cdm.device_type == DeviceTypes.CHROME:
        params = {
            "reqAttempt": 1,
            "reqPriority": 10,
            "reqName": "manifest",
            "clienttype": self.react_context["playerModel"]["data"]["config"]["ui"]["initParams"]["uimode"],
            "uiversion": self.react_context["serverDefs"]["data"]["BUILD_IDENTIFIER"],
            # "browsername": self.react_context["playerModel"]["data"]["config"]["core"]["initParams"]["browserInfo"][
            #     "name"],
            # "browserversion":
            #     self.react_context["playerModel"]["data"]["config"]["core"]["initParams"]["browserInfo"]["version"],
            # "osname":
            #     self.react_context["playerModel"]["data"]["config"]["core"]["initParams"]["browserInfo"]["os"][
            #     "name"],
            # "osversion":
            #     self.react_context["playerModel"]["data"]["config"]["core"]["initParams"]["browserInfo"]["os"][
            #     "version"]
        }

    # Send the manifest request through the MSL messaging layer; the interesting
    # response is the second element (decrypted payload chunks).
    _, payload_chunks = self.msl.send_message(
        endpoint=self.config["endpoints"]["manifest"],
        params=params,
        application_data={
            "version": 2,
            "url": "/manifest",
            "id": int(time.time()),
            "esn": self.esn,
            "languages": ["en-US"],
            "uiVersion": self.react_context["playerModel"]["data"]["config"]["ui"]["initParams"]["uiVersion"],
            "clientVersion": "6.0026.291.011",
            "params": {
                "type": "standard", # ? PREPARE
                # episodes use their episodeId, movies fall back to the title id
                "viewableId": title.data.get("episodeId", title.data["id"]),
                "profiles": profiles,
                "flavor": "STANDARD", # ? PRE_FETCH, SUPPLEMENTAL
                "drmType": self.config["configuration"]["drm_system"],
                "drmVersion": self.config["configuration"]["drm_version"],
                "usePsshBox": True,
                "isBranching": False, # ? possibly for interactive titles like Minecraft Story
                "useHttpsStreams": True,
                "supportsUnequalizedDownloadables": True, # ?
                "imageSubtitleHeight": 1080,
                "uiVersion": self.react_context["playerModel"]["data"]["config"]["ui"]["initParams"]["uiVersion"],
                "uiPlatform": self.react_context["playerModel"]["data"]["config"]["ui"]["initParams"]["uiPlatform"],
                "clientVersion": "6.0026.291.011",
                "supportsPreReleasePin": True, # ?
                "supportsWatermark": True, # ?
                "showAllSubDubTracks": True,
                "videoOutputInfo": [
                    {
                        # todo ; make this return valid, but "secure" values, maybe it helps
                        "type": "DigitalVideoOutputDescriptor",
                        "outputType": "unknown",
                        "supportedHdcpVersions": self.config["configuration"]["supported_hdcp_versions"],
                        "isHdcpEngaged": self.config["configuration"]["is_hdcp_engaged"],
                    }
                ],
                "titleSpecificData": {title.data.get("episodeId", title.data["id"]): {"unletterboxed": True}},
                "preferAssistiveAudio": False,
                "isUIAutoPlay": False,
                "isNonMember": False,
                "challenge": self.config["payload_challenge"],
                # "desiredVmaf": "plus_lts", # ?
                # "maxSupportedLanguages": 2, # ?
            },
        },
        userauthdata=self.userauthdata,
    )
    if "errorDetails" in payload_chunks:
        raise Exception(f"Manifest call failed: {payload_chunks['errorDetails']}")
    return payload_chunks
|
||||||
|
|
||||||
|
def manifest_as_tracks(self, manifest):
    """
    Convert a manifest response into a Tracks collection of Video, Audio and
    Subtitle tracks, building Widevine PSSH data per stream where present.

    :param manifest: decoded manifest payload with "video_tracks",
        "audio_tracks" and "timedtexttracks" lists.
    :returns: populated Tracks object.
    """
    # filter audio_tracks so that each stream is an entry instead of each track
    manifest["audio_tracks"] = [
        x for y in [[dict(t, **d)for d in t["streams"]] for t in manifest["audio_tracks"]] for x in y
    ]

    tracks = Tracks()
    for x in manifest["video_tracks"][0]["streams"]:
        # Build a minimal pssh box from the stream's DRM header ID
        # (0x12 0x10 = protobuf field 2, 16-byte key ID).
        _pssh = Box.parse(Box.build(dict(
            type=b"pssh",
            version=0,
            flags=0,
            system_ID=Cdm.uuid,
            init_data=b"\x12\x10" + UUID(hex=x["drmHeaderId"]).bytes
        ))) if x.get("drmHeaderId") else None

        tracks.add(
            Video(
                id_=x["downloadable_id"],
                url=x["urls"][0]["url"],
                codec=Video.Codec.from_netflix_profile(x["content_profile"]),
                bitrate=x["bitrate"] * 1000,  # manifest reports kbps
                width=x["res_w"],
                height=x["res_h"],
                # framerate fields may be absent; leave fps unknown in that case
                fps=(float(x["framerate_value"]) / x["framerate_scale"]) if "framerate_value" in x else None,
                language=self.get_original_language(manifest),
                needs_repack=False,
                drm=[Widevine(pssh=PSSH(_pssh))] if _pssh else None,
                descriptor=Track.Descriptor.URL,
            )
        )

    for x in manifest["audio_tracks"]:
        # Audio DRM headers arrive base64-encoded and already box-formatted.
        _pssh = Box.parse(base64.b64decode(x["drmHeader"]["bytes"])) if x.get("drmHeader") else None

        tracks.add(
            Audio(
                id_=x["downloadable_id"],
                url=x["urls"][0]["url"],
                codec=Audio.Codec.from_netflix_profile(x["content_profile"]),
                language=self.NF_LANG_MAP.get(x["language"], x["language"]),
                bitrate=x["bitrate"] * 1000,  # manifest reports kbps
                channels=x["channels"],
                descriptive=x.get("rawTrackType", "").lower() == "assistive",
                needs_repack=False,
                drm=[Widevine(pssh=PSSH(_pssh))] if _pssh else None,
                descriptor=Track.Descriptor.URL,
            )
        )

    for x in manifest["timedtexttracks"]:
        if not x["isNoneTrack"]:
            tracks.add(
                Subtitle(
                    id_=list(x["downloadableIds"].values())[0],
                    # first download URL of the first available subtitle format
                    url=next(iter(next(iter(x["ttDownloadables"].values()))["downloadUrls"].values())),
                    codec=Subtitle.Codec.from_netflix_profile(next(iter(x["ttDownloadables"].keys()))),
                    language=self.NF_LANG_MAP.get(x["language"], x["language"]),
                    forced=x["isForcedNarrative"],
                    sdh=x["rawTrackType"] == "closedcaptions",
                )
            )

    return tracks
|
||||||
|
|
||||||
|
def chrome_esn_generator(self):
    """
    Return a Chrome ESN, reusing a cached one when still valid.

    A new ESN ("NFCDCH-02-" + 30 random hex chars) is generated and cached
    for 6 hours only when no unexpired cache entry exists.

    Fixes: the original generated a random ESN on every call even on cache
    hits, and its "expired" and "no cache" branches were identical duplicates.
    """
    esn_cache = self.cache.get(f"chrome_esn_{self.user_profile}")

    if esn_cache and not esn_cache.expired:
        self.log.info("ESN found in cache")
        return esn_cache.data.get("esn")

    if esn_cache and esn_cache.expired:
        self.log.info("ESN expired, Generating a new Chrome ESN")
    else:
        self.log.info("Generating a new Chrome ESN")

    # only generate the random ESN when we actually need a fresh one
    esn_gen = "NFCDCH-02-" + "".join(random.choice("0123456789ABCDEF") for _ in range(30))
    esn_cache.set({"esn": esn_gen}, expiration=int(timedelta(hours=6).total_seconds()))
    return esn_cache.data.get("esn")
|
||||||
|
|
||||||
|
def parse_title(self, ctx, title) -> dict | None:
    """
    Resolve the title ID from the given input (or the parent CLI context),
    matching it against the service's TITLE_RE patterns if any are defined.

    Sets `self.title` as a side effect.

    :returns: the regex's named groups on a match, {} when the service has
        no TITLE_RE, or None when no pattern matched (input used as-is).

    Fixes: `getattr(self, "TITLE_RE")` without a default raised
    AttributeError on services that define no TITLE_RE, instead of taking
    the intended "use title as-is" path.
    """
    title = title or ctx.parent.params.get("title")
    if not title:
        self.log.error(" - No title ID specified")
        sys.exit(1)
    if not getattr(self, "TITLE_RE", None):
        self.title = title
        return {}
    for regex in as_list(self.TITLE_RE):
        m = re.search(regex, title)
        if m:
            self.title = m.group("id")
            return m.groupdict()
    self.log.warning(f" - Unable to parse title ID {title!r}, using as-is")
    self.title = title
|
||||||
|
|
||||||
|
@staticmethod
def get_original_language(manifest):
    """
    Determine a title's original audio language from its manifest.

    Prefers the audio track whose description is flagged " [Original]";
    otherwise falls back to the language code embedded in the default
    track-order media ID.
    """
    flagged = next(
        (
            track["language"]
            for track in manifest["audio_tracks"]
            if track["languageDescription"].endswith(" [Original]")
        ),
        None,
    )
    if flagged is not None:
        return Language.get(flagged)
    # e.g. get `en` from "A:1:1;2;en;0;|V:2:1;[...]"
    media_id = manifest["defaultTrackOrderList"][0]["mediaId"]
    return Language.get(media_id.split(";")[2])
|
||||||
185
services/NF/config.yaml
Normal file
185
services/NF/config.yaml
Normal file
File diff suppressed because one or more lines are too long
384
services/NOW/__init__.py
Normal file
384
services/NOW/__init__.py
Normal file
@ -0,0 +1,384 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T
|
||||||
|
from devine.core.tracks import Chapters, Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class NOW(Service):
|
||||||
|
"""
|
||||||
|
\b
|
||||||
|
Service code for Now TV's streaming service (https://nowtv.com)
|
||||||
|
Only UK is currently supported
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Cookies
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L1: 2160p, 1080p, DDP5.1
|
||||||
|
L3: 720p, AAC2.0
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input should be the slug of the title, e.g.:
|
||||||
|
/house-of-the-dragon/iYEQZ2rcf32XRKvQ5gm2Aq
|
||||||
|
/five-nights-at-freddys-2023/A5EK6sKrAaye7uXVJ57V7
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ("nowtv",)
|
||||||
|
GEOFENCE = ("gb",)
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="NOW", short_help="https://nowtv.com", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: construct the NOW service from the CLI arguments.
    return NOW(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title):
    """Store the raw title input (slug or URL path) and run base-class setup."""
    self.title = title
    super().__init__(ctx)
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """
    Authenticate the session; this service requires cookies (no credentials).

    :raises EnvironmentError: if no cookies were provided.
    """
    super().authenticate(cookies, credential)
    if not cookies:
        raise EnvironmentError("Service requires Cookies for Authentication.")

    self.session.cookies.update(cookies)
    # resolve and keep the account persona ID for later token requests
    self.persona_id = self.persona()
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """
    Search the NOW catalogue for `self.title` and yield up to 30 results.

    Yields nothing (after logging the response body) on a non-200 response.
    """
    headers = {
        "x-skyott-device": self.config["client"]["device"],
        "x-skyott-language": "en",
        "x-skyott-platform": self.config["client"]["platform"],
        "x-skyott-proposition": self.config["client"]["proposition"],
        "x-skyott-provider": self.config["client"]["provider"],
        "x-skyott-territory": self.config["client"]["territory"],
    }

    params = {
        "term": self.title,
        "limit": "30",
    }

    r = self.session.get(self.config["endpoints"]["search"], params=params, headers=headers)
    if r.status_code != 200:
        self.log.error(r.text)
        return

    for result in r.json()["search"]["results"]:
        yield SearchResult(
            id_=result.get("slug"),
            title=result.get("title"),
            description=result.get("description"),
            label=result["channel"].get("name"),
            # NOTE(review): assumes `slug` is present and starts with "/" —
            # a missing slug would raise TypeError here; verify API guarantees.
            url="https://www.nowtv.com/gb/watch/home/asset" + result.get("slug"),
        )
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """
    Fetch title metadata for the slug in `self.title` from the node API.

    Returns a Movies container when the node's classification contains
    "MOVIES", otherwise a Series built from every episode of every season.
    """
    # node API expects a leading-slash slug
    if not self.title.startswith("/"):
        self.title = "/" + self.title

    res = self.session.get(
        url=self.config["endpoints"]["node"],
        # "represent" expands seasons and their episodes in one response
        params={"slug": self.title, "represent": "(items(items))"},
        headers={
            "Accept": "*",
            "X-SkyOTT-Device": self.config["client"]["device"],
            "X-SkyOTT-Platform": self.config["client"]["platform"],
            "X-SkyOTT-Proposition": self.config["client"]["proposition"],
            "X-SkyOTT-Provider": self.config["client"]["provider"],
            "X-SkyOTT-Territory": self.config["client"]["territory"],
        },
    ).json()

    if "MOVIES" in res["attributes"].get("classification", ""):
        return Movies(
            [
                Movie(
                    id_=self.title,
                    name=res["attributes"]["title"],
                    year=res["attributes"]["year"],
                    service=self.__class__,
                    language="en-GB",
                    data=res,
                )
            ]
        )
    else:
        # flatten seasons -> episodes
        titles = [
            episode
            for season in res["relationships"]["items"]["data"]
            for episode in season["relationships"]["items"]["data"]
        ]
        return Series(
            [
                Episode(
                    id_=self.title,
                    title=res["attributes"]["title"],
                    year=episode["attributes"].get("year"),
                    season=episode["attributes"].get("seasonNumber", 0),
                    number=episode["attributes"].get("episodeNumber", 0),
                    name=episode["attributes"].get("title"),
                    service=self.__class__,
                    language="en-GB",
                    data=episode,
                )
                for episode in titles
            ]
        )
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movies, Series]) -> Tracks:
    """
    Request a VOD playout for the title and return its DASH tracks.

    Stores the licence acquisition URL on `self.license` as a side effect
    and exits on an API error response.

    Fixes: the original read the audio locale with
    `response["asset"].get("audioTracks", [])[0]`, which raises IndexError
    exactly when "audioTracks" is absent (the default [] has no element 0).
    Also collapses the 12 near-identical capability dicts into one
    comprehension (same combinations, same order).
    """
    variant_id = title.data["attributes"]["providerVariantId"]
    url = self.config["endpoints"]["vod"]

    headers = {
        "accept": "application/vnd.playvod.v1+json",
        "content-type": "application/vnd.playvod.v1+json",
        "x-skyott-activeterritory": self.config["client"]["territory"],
        "x-skyott-device": self.config["client"]["device"],
        "x-skyott-platform": self.config["client"]["platform"],
        "x-skyott-proposition": self.config["client"]["proposition"],
        "x-skyott-provider": self.config["client"]["provider"],
        "x-skyott-territory": self.config["client"]["territory"],
        "x-skyott-usertoken": self.get_token(),
    }

    # Advertise every Widevine DASH codec/container combination:
    # (H265/H264) x (EAC3/AAC) x (TS/ISOBMFF/MP4), in the order the
    # original literal list used.
    capabilities = [
        {
            "transport": "DASH",
            "protection": "WIDEVINE",
            "vcodec": vcodec,
            "acodec": acodec,
            "container": container,
        }
        for vcodec, acodec in (("H265", "EAC3"), ("H264", "EAC3"), ("H265", "AAC"), ("H264", "AAC"))
        for container in ("TS", "ISOBMFF", "MP4")
    ]

    data = {
        "device": {
            "capabilities": capabilities,
            "model": self.config["client"]["model"],
            "maxVideoFormat": "SD",  # "HD", "UHD"
            "hdcpEnabled": "false",
            "supportedColourSpaces": ["DV", "HDR10", "SDR"],
        },
        "providerVariantId": variant_id,
        # NOTE(review): API appears to expect the literal string "null" here
        "parentalControlPin": "null",
    }

    # payload must be serialised before signing: the signature covers the bytes
    data = json.dumps(data)
    headers["x-sky-signature"] = self.calculate_signature("POST", url, headers, data)

    response = self.session.post(url, headers=headers, data=data).json()
    if response.get("errorCode"):
        self.log.error(response.get("description"))
        sys.exit(1)

    manifest = response["asset"]["endpoints"][0]["url"]
    self.license = response["protection"]["licenceAcquisitionUrl"]
    # default to en-GB when the asset reports no audio tracks / no locale
    audio_tracks = response["asset"].get("audioTracks") or [{}]
    locale = audio_tracks[0].get("locale", "en-GB")

    tracks = DASH.from_url(url=manifest, session=self.session).to_tracks(language=locale)

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Title_T) -> Chapters:
    # NOW provides no chapter markers; always return an empty collection.
    return Chapters()
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
    """
    Return the Widevine service (privacy) certificate; none is used here.

    Fixes: the return annotation claimed `str` while the body always
    returns None — corrected to Optional[str].
    """
    return None  # WidevineCdm.common_privacy_cert
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the Widevine challenge to the licence server and return the raw licence."""
    response = requests.post(url=self.license, data=challenge)
    if response.status_code == 200:
        return response.content
    self.log.error(response.text)
    sys.exit(1)
|
||||||
|
|
||||||
|
# service specific functions
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def calculate_sky_header(headers: dict) -> str:
|
||||||
|
text_headers = ""
|
||||||
|
for key in sorted(headers.keys()):
|
||||||
|
if key.lower().startswith("x-skyott"):
|
||||||
|
text_headers += key + ": " + headers[key] + "\n"
|
||||||
|
return hashlib.md5(text_headers.encode()).hexdigest()
|
||||||
|
|
||||||
|
def calculate_signature(self, method: str, url: str, headers: dict, payload: str) -> str:
    """
    Build the x-sky-signature header value for a request.

    HMAC-SHA1 signs a newline-joined digest of the method, URL path,
    client SDK ID, Sky-header MD5, timestamp and payload MD5, then embeds
    the base64 signature and the same timestamp into the configured
    signature format string.

    Fixes: the original called `int(time.time())` twice (once for the
    signed string, once for the header); crossing a second boundary between
    the calls would sign one timestamp but report another, invalidating the
    signature. The timestamp is now computed once and reused.
    """
    timestamp = int(time.time())

    to_hash = (
        "{method}\n{path}\n{response_code}\n{app_id}\n{version}\n{headers_md5}\n" "{timestamp}\n{payload_md5}\n"
    ).format(
        method=method,
        path=urlparse(url).path if url.startswith("http") else url,
        response_code="",  # empty for requests (used for responses)
        app_id=self.config["client"]["client_sdk"],
        version="1.0",
        headers_md5=self.calculate_sky_header(headers),
        timestamp=timestamp,
        payload_md5=hashlib.md5(payload.encode()).hexdigest(),
    )

    signature_key = bytes(self.config["security"]["signature_hmac_key_v4"], "utf-8")
    hashed = hmac.new(signature_key, to_hash.encode("utf8"), hashlib.sha1).digest()
    signature_hmac = base64.b64encode(hashed).decode("utf8")

    return self.config["security"]["signature_format"].format(
        client=self.config["client"]["client_sdk"], signature=signature_hmac, timestamp=timestamp
    )
|
||||||
|
|
||||||
|
def get_token(self) -> str:
    """
    Exchange the SSO cookie and persona ID for an OTT user token.

    Exits (after logging) if the token endpoint returns an error "message".
    """
    url = self.config["endpoints"]["tokens"]

    headers = {
        "accept": "application/vnd.tokens.v1+json",
        "content-type": "application/vnd.tokens.v1+json",
        "x-skyott-device": self.config["client"]["device"],
        "x-skyott-platform": self.config["client"]["platform"],
        "x-skyott-proposition": self.config["client"]["proposition"],
        "x-skyott-provider": self.config["client"]["provider"],
        "x-skyott-territory": self.config["client"]["territory"],
    }

    data = {
        "auth": {
            "authScheme": self.config["client"]["auth_scheme"],
            # the MESSO SSO session cookie acts as the auth token
            "authToken": self.session.cookies.get("skyCEsidismesso01"),
            "authIssuer": self.config["client"]["auth_issuer"],
            "personaId": self.persona_id,
            "provider": self.config["client"]["provider"],
            "providerTerritory": self.config["client"]["territory"],
            "proposition": self.config["client"]["proposition"],
        },
        "device": {
            "type": self.config["client"]["device"],
            "platform": self.config["client"]["platform"],
            "id": self.config["client"]["id"],
            "drmDeviceId": self.config["client"]["drm_device_id"],
        },
    }

    data = json.dumps(data)
    # endpoint validates the body against this MD5 checksum header
    headers["Content-MD5"] = hashlib.md5(data.encode("utf-8")).hexdigest()

    response = self.session.post(url, headers=headers, data=data).json()
    if response.get("message"):
        self.log.error(f"{response['message']}")
        sys.exit(1)

    return response["userToken"]
|
||||||
|
|
||||||
|
def persona(self):
    """
    Look up the account's first persona ID via the persona store.

    Exits (after logging) if the store returns an error "message",
    which typically indicates expired cookies.
    """
    headers = {
        "accept": "application/vnd.persona.v1+json",
        # authenticate with the MESSO SSO session cookie
        "x-skyid-token": self.session.cookies.get("skyCEsidismesso01"),
        "x-skyott-device": self.config["client"]["device"],
        "x-skyott-platform": self.config["client"]["platform"],
        "x-skyott-proposition": "NOWTV",
        "x-skyott-provider": "NOWTV",
        "x-skyott-territory": self.config["client"]["territory"],
        "x-skyott-tokentype": "SSO",
    }
    response = self.session.get(self.config["endpoints"]["personas"], headers=headers).json()
    if response.get("message"):
        self.log.error(f"{response['message']} - Cookies may have expired")
        sys.exit(1)

    return response["personas"][0]["personaId"]
|
||||||
27
services/NOW/config.yaml
Normal file
27
services/NOW/config.yaml
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
endpoints:
|
||||||
|
login: 'https://uiapi.id.nowtv.com/signin/service/international'
|
||||||
|
config: 'https://www.nowtv.com/international/static/b956eb3/config/{territory}/nowtv/nowtv/web/production/config.json'
|
||||||
|
personas: 'https://persona-store.sky.com/persona-store/personas'
|
||||||
|
tokens: 'https://auth.client.ott.sky.com/auth/tokens'
|
||||||
|
me: 'https://p.sky.com/auth/users/me'
|
||||||
|
node: 'https://ie.api.atom.nowtv.com/adapter-atlas/v3/query/node/'
|
||||||
|
vod: 'https://p.sky.com/video/playouts/vod'
|
||||||
|
search: 'https://clip.search.sky.com/bff/search/v1'
|
||||||
|
|
||||||
|
client:
|
||||||
|
config_version: '1.0.8'
|
||||||
|
territory: 'GB'
|
||||||
|
provider: 'NOWTV'
|
||||||
|
proposition: 'NOWTV'
|
||||||
|
platform: 'ANDROIDTV' # PC, ANDROID, ANDROIDTV
|
||||||
|
model: 'Nvidia Shield Android TV'
|
||||||
|
device: 'TV' # COMPUTER, TABLET, TV,
|
||||||
|
id: 'Z-sKxKApCe7c3dBMGAYtKU8NmWKDcWrCKobKpnVTLqc'
|
||||||
|
drm_device_id: 'UNKNOWN'
|
||||||
|
client_sdk: 'IE-NOWTV-ANDROID-v1' # NOWTV-WEBPLAYER-v2
|
||||||
|
auth_scheme: 'MESSO'
|
||||||
|
auth_issuer: 'NOWTV'
|
||||||
|
|
||||||
|
security:
|
||||||
|
signature_hmac_key_v4: "5f8RLBppaqKGO8bwKwNifjZ6bM8zXCVwkAK7hkhq3PS4pf"
|
||||||
|
signature_format: 'SkyOTT client="{client}",signature="{signature}",timestamp="{timestamp}",version="1.0"'
|
||||||
470
services/PCOK/__init__.py
Normal file
470
services/PCOK/__init__.py
Normal file
@ -0,0 +1,470 @@
|
|||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
from pathlib import Path
|
||||||
|
from click import Context
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.tracks.subtitle import Subtitle
|
||||||
|
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH, HLS
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Tracks, Audio, Video
|
||||||
|
|
||||||
|
|
||||||
|
class PCOK(Service):
|
||||||
|
"""
|
||||||
|
Service code for NBC's Peacock streaming service (https://peacocktv.com).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Authorization: Cookies
|
||||||
|
Security: UHD@-- FHD@L3, doesn't care about releases.
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips: - The library of contents can be viewed without logging in at https://www.peacocktv.com/stream/tv
|
||||||
|
See the footer for links to movies, news, etc. A US IP is required to view.
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ["PCOK", "peacock"]
|
||||||
|
GEOFENCE = ["us"]
|
||||||
|
TITLE_RE = [
|
||||||
|
r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>movies/[a-z0-9/./-]+/[a-f0-9-]+)",
|
||||||
|
r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>tv/[a-z0-9-/.]+/\d+)",
|
||||||
|
r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>-/[a-z0-9-/.]+/\d+)",
|
||||||
|
r"(?:https?://(?:www\.)?peacocktv\.com/stream-tv/)?(?P<id>[a-z0-9-/.]+)",
|
||||||
|
]
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="PCOK", short_help="https://peacocktv.com")
@click.argument("title", type=str, required=False)
@click.option("-m", "--movie", is_flag=True, default=False, help="Title is a movie.")
@click.pass_context
def cli(ctx: click.Context, **kwargs: Any) -> "PCOK":
    # Click entry point: construct the PCOK service from the CLI arguments.
    return PCOK(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title, movie):
    """
    Initialise the service with the CLI title/movie flag and player
    defaults taken from the parent command's parameters.

    Fixes: removed the trailing dead statement
    `title = self.title.split("/")[-1]`, which assigned to a local that
    was never used.
    """
    super().__init__(ctx)
    self.title = title
    self.movie = movie

    self.profile = ctx.obj.profile

    self.service_config = None
    self.hmac_key = None
    self.tokens = None
    self.license_api = None
    self.license_bt = None
    self.vcodec = ctx.parent.params["vcodec"]
    # NOTE(review): this maps "DV" -> "DOLBYVISION", yet get_tracks compares
    # self.range against "DV" — that branch can never trigger; confirm intent.
    self.range = {"SDR": "SDR", "HDR10": "HDR10", "DV": "DOLBYVISION"}.get(ctx.parent.params["range_"][0])

    self.configure()
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """
    Resolve the title slug and fetch its metadata from the node API,
    returning Movies for movie slugs and Series otherwise.

    Fixes: (1) `raise self.log.error(...)` raised the None returned by
    the logger (a TypeError at runtime) — now logs then raises ValueError;
    (2) removed the first episode-collection loop whose result was
    immediately discarded by a second `titles = []`.
    """
    # Title is a slug, e.g. `/tv/the-office/4902514835143843112` or just `the-office`

    if "/" not in self.title:
        # bare name: scrape the full asset path from the stream-tv page
        r = self.session.get(self.config["endpoints"]["stream_tv"].format(title_id=self.title))
        self.title = self.find("/watch/asset(/[^']+)", r.text)
        if not self.title:
            self.log.error(" - Title ID not found or invalid")
            raise ValueError(" - Title ID not found or invalid")

    if not self.title.startswith("/"):
        self.title = f"/{self.title}"

    if self.title.startswith("/movies/"):
        self.movie = True

    if self.title.startswith("/stream-tv"):
        self.tv = True

    res = self.session.get(
        url=self.config["endpoints"]["node"],
        # "represent" expands seasons and their episodes in one response
        params={"slug": self.title, "represent": "(items(items))"},
        headers={
            "Accept": "*",
            "Referer": f"https://www.peacocktv.com/watch/asset{self.title}",
            "X-SkyOTT-Device": self.config["client"]["device"],
            "X-SkyOTT-Platform": self.config["client"]["platform"],
            "X-SkyOTT-Proposition": self.config["client"]["proposition"],
            "X-SkyOTT-Provider": self.config["client"]["provider"],
            "X-SkyOTT-Territory": self.config["client"]["territory"],
            "X-SkyOTT-Language": "en",
        },
    ).json()

    if self.movie:
        return Movies(
            [
                Movie(
                    id_=self.title,
                    service=self.__class__,
                    name=res["attributes"]["title"],
                    year=res["attributes"]["year"],
                    data=res,
                )
            ]
        )
    else:
        titles = []
        for season in res["relationships"]["items"]["data"]:
            for episode in season["relationships"]["items"]["data"]:
                titles.append(
                    Episode(
                        id_=self.title,
                        service=self.__class__,
                        title=res["attributes"]["title"],
                        year=episode["attributes"].get("year"),
                        season=episode["attributes"].get("seasonNumber"),
                        number=episode["attributes"].get("episodeNumber"),
                        name=episode["attributes"].get("title"),
                        data=episode,
                    )
                )
        return Series(titles)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Request a VOD playout for *title* and return its available tracks.

    Raises ValueError when the playout API responds with an error payload.
    """
    # Default colour-space request. The flag branches below assign lists,
    # so use a list here too: the original assigned a bare enum value,
    # which is inconsistent with the branches and not JSON-serializable.
    # TODO confirm "SDR" is the value the API expects for the default case.
    supported_colour_spaces = ["SDR"]

    if self.range == "HDR10":
        self.log.info("Switched dynamic range to HDR10")
        supported_colour_spaces = ["HDR10"]
    if self.range == "DV":
        self.log.info("Switched dynamic range to DV")
        supported_colour_spaces = ["DolbyVision"]

    content_id = title.data["attributes"]["formats"]["HD"]["contentId"]
    variant_id = title.data["attributes"]["providerVariantId"]

    sky_headers = {
        # order of these matter!
        "X-SkyOTT-Agent": ".".join(
            [
                self.config["client"]["proposition"].lower(),
                self.config["client"]["device"].lower(),
                self.config["client"]["platform"].lower(),
            ]
        ),
        "X-SkyOTT-PinOverride": "false",
        "X-SkyOTT-Provider": self.config["client"]["provider"],
        "X-SkyOTT-Territory": self.config["client"]["territory"],
        "X-SkyOTT-UserToken": self.tokens["userToken"],
    }

    # Minified JSON; the exact byte string is part of the request signature.
    body = json.dumps(
        {
            "device": {
                # maybe get these from the config endpoint?
                "capabilities": [
                    {
                        "protection": "WIDEVINE",
                        "container": "ISOBMFF",
                        "transport": "DASH",
                        "acodec": "AAC",
                        "vcodec": "H265",
                    },
                    {
                        "protection": "NONE",
                        "container": "ISOBMFF",
                        "transport": "DASH",
                        "acodec": "AAC",
                        "vcodec": "H265",
                    },
                ],
                "maxVideoFormat": "UHD",
                "supportedColourSpaces": supported_colour_spaces,
                "model": self.config["client"]["platform"],
                "hdcpEnabled": "true",
            },
            "client": {
                "thirdParties": ["FREEWHEEL", "YOSPACE"]  # CONVIVA
            },
            "contentId": content_id,
            "providerVariantId": variant_id,
            "parentalControlPin": "null",
        },
        separators=(",", ":"),
    )

    manifest = self.session.post(
        url=self.config["endpoints"]["vod"],
        data=body,
        headers=dict(
            **sky_headers,
            **{
                "Accept": "application/vnd.playvod.v1+json",
                "Content-Type": "application/vnd.playvod.v1+json",
                "X-Sky-Signature": self.create_signature_header(
                    method="POST",
                    path="/video/playouts/vod",
                    sky_headers=sky_headers,
                    body=body,
                    timestamp=int(time.time()),
                ),
            },
        ),
    ).json()
    if "errorCode" in manifest:
        # BUGFIX: the original did `raise self.log.error(...)`, which raises
        # None (log methods return None) and produced a confusing TypeError.
        message = f"An error occurred: {manifest['description']} [{manifest['errorCode']}]"
        self.log.error(f" - {message}")
        raise ValueError(message)

    self.license_api = manifest["protection"]["licenceAcquisitionUrl"]
    self.license_bt = manifest["protection"]["licenceToken"]

    tracks = DASH.from_url(url=manifest["asset"]["endpoints"][0]["url"], session=self.session).to_tracks(
        language="en"
    )

    # Tag tracks with the manifest-reported dynamic range.
    # BUGFIX: the original compared the colourSpace string against a *list*
    # for the DV case and referenced the loop variable outside any loop.
    colour_space = manifest["asset"]["format"]["colourSpace"]
    if colour_space == "HDR10":
        for track in tracks:
            track.range = Video.Range.HDR10
    elif colour_space == "DolbyVision":
        for track in tracks:
            track.range = Video.Range.DV

    for track in tracks:
        track.needs_proxy = True

    for track in tracks.audio:
        if track.language.territory == "AD":
            # This is supposed to be Audio Description, not Andorra
            track.language.territory = None

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title):
    """No chapter information is available from this service."""
    chapters = []
    return chapters
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, *, challenge, title, track):
    # Defer to devine's default implementation for the service certificate.
    return super().get_widevine_service_certificate(challenge=challenge, title=title, track=track)
|
||||||
|
|
||||||
|
def get_widevine_license(self, title, challenge, **_) -> bytes:
    """POST the Widevine challenge to the playout's license endpoint."""
    # The signed path is the URL path component of the license endpoint.
    license_path = "/" + self.license_api.split("://", 1)[1].split("/", 1)[1]
    signature = self.create_signature_header(
        method="POST",
        path=license_path,
        sky_headers={},
        body="",
        timestamp=int(time.time()),
    )
    response = self.session.post(
        url=self.license_api,
        headers={
            "Accept": "*",
            "X-Sky-Signature": signature,
        },
        data=challenge,  # expects bytes
    )
    return response.content
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
def configure(self):
    """Prepare the session: fetch client config, obtain and verify tokens."""
    self.session.headers.update({"Origin": "https://www.peacocktv.com"})
    self.log.info("Getting Peacock Client configuration")
    # The PC client has no remote config endpoint; only fetch for others.
    if self.config["client"]["platform"] != "PC":
        self.service_config = self.session.get(
            url=self.config["endpoints"]["config"].format(
                territory=self.config["client"]["territory"],
                provider=self.config["client"]["provider"],
                proposition=self.config["client"]["proposition"],
                device=self.config["client"]["platform"],
                version=self.config["client"]["config_version"],
            )
        ).json()
    self.hmac_key = bytes(self.config["security"]["signature_hmac_key_v4"], "utf-8")
    self.log.info("Getting Authorization Tokens")
    self.tokens = self.get_tokens()
    self.log.info("Verifying Authorization Tokens")
    if not self.verify_tokens():
        # BUGFIX: the original did `raise self.log.error(...)`, which raises
        # None (log methods return None) and crashed with a TypeError that
        # hid the actual message.
        self.log.error(" - Failed! Cookies might be outdated.")
        raise PermissionError("Token verification failed; cookies might be outdated.")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def calculate_sky_header_md5(headers):
|
||||||
|
if len(headers.items()) > 0:
|
||||||
|
headers_str = "\n".join(f"{x[0].lower()}: {x[1]}" for x in headers.items()) + "\n"
|
||||||
|
else:
|
||||||
|
headers_str = "{}"
|
||||||
|
return str(hashlib.md5(headers_str.encode()).hexdigest())
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def calculate_body_md5(body):
|
||||||
|
return str(hashlib.md5(body.encode()).hexdigest())
|
||||||
|
|
||||||
|
def calculate_signature(self, msg):
|
||||||
|
digest = hmac.new(self.hmac_key, bytes(msg, "utf-8"), hashlib.sha1).digest()
|
||||||
|
return str(base64.b64encode(digest), "utf-8")
|
||||||
|
|
||||||
|
def create_signature_header(self, method, path, sky_headers, body, timestamp):
    """Build the X-Sky-Signature header value for a request.

    The signed payload is a newline-joined field list; field order and the
    trailing newline are part of the scheme — do not reorder.
    """
    fields = [
        method.upper(),
        path,
        "",  # important!
        self.config["client"]["client_sdk"],
        "1.0",
        self.calculate_sky_header_md5(sky_headers),
        str(timestamp),
        self.calculate_body_md5(body),
        "",  # produces the required trailing newline when joined
    ]
    signature_hmac = self.calculate_signature("\n".join(fields))

    return self.config["security"]["signature_format"].format(
        client=self.config["client"]["client_sdk"],
        signature=signature_hmac,
        timestamp=timestamp,
    )
|
||||||
|
|
||||||
|
def get_tokens(self):
    """Return OTT auth tokens, reusing an on-disk cache while still valid."""
    # Cache tokens per profile and client id so different accounts don't
    # share one token file. (BUGFIX: the original formatted the literal
    # "token.json" with no placeholders — the .format() call was a no-op
    # and every profile collided on the same cache file.)
    cache_path = Path(
        config.directories.cache
        / self.__class__.__name__
        / "token_{profile}_{id}.json".format(profile=self.profile, id=self.config["client"]["id"])
    )
    if os.path.isfile(cache_path):
        with open(cache_path, encoding="utf-8") as fd:
            tokens = json.load(fd)
        tokens_expiration = tokens.get("tokenExpiryTime", None)
        # Expiry is an ISO-8601 UTC ("Z") timestamp, so compare in UTC.
        # (BUGFIX: the original compared against local time via now().)
        if tokens_expiration and datetime.strptime(tokens_expiration, "%Y-%m-%dT%H:%M:%S.%fZ") > datetime.utcnow():
            return tokens

    # Get all SkyOTT headers
    sky_headers = {
        # Order of these matters!
        "X-SkyOTT-Agent": ".".join(
            [
                self.config["client"]["proposition"],
                self.config["client"]["device"],
                self.config["client"]["platform"],
            ]
        ).lower(),
        "X-SkyOTT-Device": self.config["client"]["device"],
        "X-SkyOTT-Platform": self.config["client"]["platform"],
        "X-SkyOTT-Proposition": self.config["client"]["proposition"],
        "X-SkyOTT-Provider": self.config["client"]["provider"],
        "X-SkyOTT-Territory": self.config["client"]["territory"],
    }

    try:
        # Call personas endpoint to get the accounts personaId
        personas = self.session.get(
            url=self.config["endpoints"]["personas"],
            headers=dict(
                **sky_headers,
                **{
                    "Accept": "application/vnd.persona.v1+json",
                    "Content-Type": "application/vnd.persona.v1+json",
                    "X-SkyOTT-TokenType": self.config["client"]["auth_scheme"],
                },
            ),
        ).json()
    except requests.HTTPError as e:
        error = e.response.json()
        if "message" in error and "code" in error:
            error = f"{error['message']} [{error['code']}]"
            if "bad credentials" in error.lower():
                error += ". Cookies may be expired or invalid."
            # BUGFIX: the original raised `self.log.exit(...)`; standard
            # loggers have no .exit attribute and raising a log call's
            # (None) result is a TypeError either way.
            self.log.error(f" - Unable to get persona ID: {error}")
            raise ConnectionError(error) from e
        self.log.error(f" - HTTP Error {e.response.status_code}: {e.response.reason}")
        raise ConnectionError(f"HTTP Error {e.response.status_code}: {e.response.reason}") from e
    persona = personas["personas"][0]["personaId"]

    # Craft the body data that will be sent to the tokens endpoint, being minified and order matters!
    body = json.dumps(
        {
            "auth": {
                "authScheme": self.config["client"]["auth_scheme"],
                "authIssuer": self.config["client"]["auth_issuer"],
                "provider": self.config["client"]["provider"],
                "providerTerritory": self.config["client"]["territory"],
                "proposition": self.config["client"]["proposition"],
                "personaId": persona,
            },
            "device": {
                "type": self.config["client"]["device"],
                "platform": self.config["client"]["platform"],
                "id": self.config["client"]["id"],
                "drmDeviceId": self.config["client"]["drm_device_id"],
            },
        },
        separators=(",", ":"),
    )

    # Get the tokens
    tokens = self.session.post(
        url=self.config["endpoints"]["tokens"],
        headers=dict(
            **sky_headers,
            **{
                "Accept": "application/vnd.tokens.v1+json",
                "Content-Type": "application/vnd.tokens.v1+json",
                "X-Sky-Signature": self.create_signature_header(
                    method="POST",
                    path="/auth/tokens",
                    sky_headers=sky_headers,
                    body=body,
                    timestamp=int(time.time()),
                ),
            },
        ),
        data=body,
    ).json()

    os.makedirs(os.path.dirname(cache_path), exist_ok=True)
    with open(cache_path, "w", encoding="utf-8") as fd:
        json.dump(tokens, fd)

    return tokens
|
||||||
|
|
||||||
|
def verify_tokens(self):
    """Verify the tokens by calling the /auth/users/me endpoint and seeing if it works"""
    sky_headers = {
        # order of these matter!
        "X-SkyOTT-Device": self.config["client"]["device"],
        "X-SkyOTT-Platform": self.config["client"]["platform"],
        "X-SkyOTT-Proposition": self.config["client"]["proposition"],
        "X-SkyOTT-Provider": self.config["client"]["provider"],
        "X-SkyOTT-Territory": self.config["client"]["territory"],
        "X-SkyOTT-UserToken": self.tokens["userToken"],
    }
    request_headers = dict(
        **sky_headers,
        **{
            "Accept": "application/vnd.userinfo.v2+json",
            "Content-Type": "application/vnd.userinfo.v2+json",
            "X-Sky-Signature": self.create_signature_header(
                method="GET",
                path="/auth/users/me",
                sky_headers=sky_headers,
                body="",
                timestamp=int(time.time()),
            ),
        },
    )
    try:
        self.session.get(url=self.config["endpoints"]["me"], headers=request_headers)
    except requests.HTTPError:
        return False
    return True
|
||||||
27
services/PCOK/config.yaml
Normal file
27
services/PCOK/config.yaml
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
endpoints:
|
||||||
|
stream_tv: "https://www.peacocktv.com/stream-tv/{title_id}"
|
||||||
|
config: "https://config.clients.peacocktv.com/{territory}/{provider}/{proposition}/{device}/PROD/{version}/config.json"
|
||||||
|
login: "https://rango.id.peacocktv.com/signin/service/international"
|
||||||
|
personas: "https://persona.id.peacocktv.com/persona-store/personas"
|
||||||
|
tokens: "https://ovp.peacocktv.com/auth/tokens"
|
||||||
|
me: "https://ovp.peacocktv.com/auth/users/me"
|
||||||
|
node: "https://atom.peacocktv.com/adapter-calypso/v3/query/node"
|
||||||
|
vod: "https://ovp.peacocktv.com/video/playouts/vod"
|
||||||
|
|
||||||
|
client:
|
||||||
|
config_version: "1.0.8"
|
||||||
|
territory: "US"
|
||||||
|
provider: "NBCU"
|
||||||
|
proposition: "NBCUOTT"
|
||||||
|
platform: "ANDROID" # PC, ANDROID
|
||||||
|
device: "TABLET" # COMPUTER, TABLET
|
||||||
|
id: "Jcvf1y0whKOI29vRXcJy"
|
||||||
|
drm_device_id: "UNKNOWN"
|
||||||
|
client_sdk: "NBCU-WEB-v4" # NBCU-ANDROID-v3 NBCU-ANDRTV-v4
|
||||||
|
auth_scheme: "MESSO"
|
||||||
|
auth_issuer: "NOWTV"
|
||||||
|
|
||||||
|
security:
|
||||||
|
signature_hmac_key_v4: "FvT9VtwvhtSZvqnExMsvDDTEvBqR3HdsMcBFtWYV"
|
||||||
|
signature_hmac_key_v6: "izU6EJqqu6DOhOWSk5X4p9dod3fNqH7vzKtYDK8d"
|
||||||
|
signature_format: 'SkyOTT client="{client}",signature="{signature}",timestamp="{timestamp}",version="1.0"'
|
||||||
292
services/PLUTO/__init__.py
Normal file
292
services/PLUTO/__init__.py
Normal file
@ -0,0 +1,292 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
import uuid
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH, HLS
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapters, Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class PLUTO(Service):
|
||||||
|
"""
|
||||||
|
\b
|
||||||
|
Service code for Pluto TV on demand streaming service (https://pluto.tv/)
|
||||||
|
Credit to @wks_uwu for providing an alternative API, making the codebase much cleaner
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: None
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L3: 720p, AAC2.0
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input can be complete title URL or just the path:
|
||||||
|
SERIES: /series/65ce4e5003fa740013793127/details
|
||||||
|
EPISODE: /series/65ce4e5003fa740013793127/season/1/episode/662c2af0a9f2d200131ba731
|
||||||
|
MOVIE: /movies/635c1e430888bc001ad01a9b/details
|
||||||
|
- Use --lang LANG_RANGE option to request non-English tracks
|
||||||
|
|
||||||
|
\b
|
||||||
|
Notes:
|
||||||
|
- Both DASH(widevine) and HLS(AES) are looked for in the API
|
||||||
|
- HLS is prioritized over DASH, because the DASH version will sometimes have sync issues
|
||||||
|
- Pluto use transport streams for HLS, meaning the video and audio are a part of the same stream
|
||||||
|
As a result, only videos are listed as tracks. But the audio will be included as well.
|
||||||
|
- With the variations in manifests, and the inconsistency in the API, the language is set as "en" by default
|
||||||
|
for all tracks, no matter what region you're in.
|
||||||
|
You can manually set the language in the get_titles() function if you want to change it.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ("plu", "plutotv")
|
||||||
|
TITLE_RE = (
|
||||||
|
r"^"
|
||||||
|
r"(?:https?://(?:www\.)?pluto\.tv(?:/[a-z]{2})?)?"
|
||||||
|
r"(?:https?://(?:www\.)?pluto\.tv(?:/latam)?)?"
|
||||||
|
r"(?:/on-demand)?"
|
||||||
|
r"(?:/search)?"
|
||||||
|
r"(?:/details)?"
|
||||||
|
r"/(?P<type>movies|series)"
|
||||||
|
r"/(?P<id>[a-z0-9-]+)"
|
||||||
|
r"(?:(?:/season/(\d+)/episode/(?P<episode>[a-z0-9-]+)))?"
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="PLUTO", short_help="https://pluto.tv/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: construct the service from the parsed CLI context.
    return PLUTO(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title):
    # Keep the raw title input; URL parsing happens later in get_titles().
    self.title = title
    super().__init__(ctx)
|
||||||
|
|
||||||
|
def authenticate(
    self,
    cookies: Optional[CookieJar] = None,
    credential: Optional[Credential] = None,
) -> None:
    """Bootstrap an anonymous Pluto session; no account login is needed."""
    super().authenticate(cookies, credential)

    # Device parameters sent with every request on this session.
    default_params = {
        "appName": "web",
        "appVersion": "na",
        "clientID": str(uuid.uuid1()),
        "deviceDNT": 0,
        "deviceId": "unknown",
        "clientModelNumber": "na",
        "serverSideAds": "false",
        "deviceMake": "unknown",
        "deviceModel": "web",
        "deviceType": "web",
        "deviceVersion": "unknown",
        "sid": str(uuid.uuid1()),
        "drmCapabilities": "widevine:L3",
    }
    self.session.params = default_params

    boot = self.session.get(self.config["endpoints"]["auth"]).json()
    self.token = boot["sessionToken"]
    self.region = boot["session"].get("activeRegion", "").lower()
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Query the Pluto search service and yield on-demand results."""
    query_params = {
        "q": self.title,
        "limit": "100",
    }

    response = self.session.get(
        self.config["endpoints"]["search"].format(query=self.title),
        headers={"Authorization": f"Bearer {self.token}"},
        params=query_params,
    )
    response.raise_for_status()

    for entry in response.json()["data"]:
        # Skip live-TV entries; only on-demand items are usable here.
        if entry.get("type") in ["timeline", "channel"]:
            continue
        content = entry.get("id")
        kind = "movies" if entry.get("type") == "movie" else "series"
        yield SearchResult(
            id_=f"/{kind}/{content}/details",
            title=entry.get("name"),
            description=entry.get("synopsis"),
            label=entry.get("type"),
            url=f"https://pluto.tv/{self.region}/on-demand/{kind}/{content}/details",
        )
|
||||||
|
|
||||||
|
def get_titles(self) -> Titles_T:
    """Parse the input URL and return the matching Movie/Series titles.

    The original duplicated the Episode-building code across the
    "single episode" and "whole series" branches (with inconsistent year
    handling: None vs self.year); both now share _episodes().
    """
    try:
        kind, content_id, episode_id = (
            re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "id", "episode")
        )
    except Exception:
        raise ValueError("Could not parse ID from title - is the URL correct?")

    if kind == "series":
        r = self.session.get(self.config["endpoints"]["series"].format(season_id=content_id))
        if not r.ok:
            raise ConnectionError(f"{r.json().get('message')}")
        # episode_id is None for /details URLs -> all episodes are returned.
        return Series(self._episodes(r.json(), episode_id))

    if kind == "movies":
        url = self.config["endpoints"]["movie"].format(video_id=content_id)
        r = self.session.get(url, headers={"Authorization": f"Bearer {self.token}"})
        if not r.ok:
            raise ConnectionError(f"{r.json().get('message')}")

        return Movies(
            [
                Movie(
                    id_=movie.get("_id"),
                    service=self.__class__,
                    name=movie.get("name"),
                    language="en",  # self.region,
                    data=movie,
                    year=self.year(movie),
                )
                for movie in r.json()
            ]
        )

def _episodes(self, data: dict, episode_id: Optional[str] = None) -> "list[Episode]":
    """Build Episode objects from a series payload.

    When *episode_id* is given, only that episode is returned; otherwise
    every episode of every season is returned.
    """
    return [
        Episode(
            id_=episode.get("_id"),
            service=self.__class__,
            title=data.get("name"),
            season=int(episode.get("season")),
            number=int(episode.get("number")),
            name=episode.get("name"),
            year=self.year(episode),
            language="en",  # self.region,
            data=episode,
        )
        for season in data["seasons"]
        for episode in season["episodes"]
        if episode_id is None or episode.get("_id") == episode_id
    ]
|
||||||
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
    """Fetch clip sources for the title and return its tracks.

    HLS is preferred over DASH (the DASH renditions can have sync issues);
    HLS uses transport streams, so audio is muxed into the video track.
    """
    url = self.config["endpoints"]["episodes"].format(episode_id=title.id)
    episode = self.session.get(url).json()

    # The clip list mixes the feature with ads/bumpers; take the first
    # non-bumper clip's sources.
    sources = next((item.get("sources") for item in episode if not self.bumpers(item.get("name", ""))), None)

    if not sources:
        raise ValueError("Unable to find manifest for this title")

    dash = next((x.get("file") for x in sources if x.get("type").lower() == "dash"), None)
    hls = next((x.get("file") for x in sources if x.get("type").lower() == "hls"), None)

    if dash:
        self.license = self.config["endpoints"]["license"]
        manifest = dash.replace("https://siloh.pluto.tv", "http://silo-hybrik.pluto.tv.s3.amazonaws.com")
        tracks = DASH.from_url(manifest, self.session).to_tracks(language=title.language)

        for track in tracks.audio:
            role = track.data["dash"]["adaptation_set"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True
    else:
        self.license = None
        m3u8_url = hls.replace("https://siloh.pluto.tv", "http://silo-hybrik.pluto.tv.s3.amazonaws.com")
        manifest = self.clean_manifest(self.session.get(m3u8_url).text)
        tracks = HLS.from_text(manifest, m3u8_url).to_tracks(language=title.language)

        # Remove separate AD audio tracks (audio is muxed into the video TS).
        # BUGFIX: the original removed items while iterating the same list,
        # which skips every other element and left half the tracks behind.
        tracks.audio.clear()

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Title_T) -> Chapters:
    """Pluto exposes no chapter markers."""
    chapters = Chapters()
    return chapters
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
    # No privacy certificate is used for Pluto license requests.
    # (Annotation corrected: this always returns None, not a str.)
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the Widevine challenge to the license server (DASH only).

    Returns None when the title is unencrypted HLS (no license URL set).
    """
    if not self.license:
        return None

    response = self.session.post(url=self.license, data=challenge)
    if response.status_code != 200:
        raise ConnectionError(response.text)
    return response.content
|
||||||
|
|
||||||
|
# service specific functions
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def clean_manifest(text: str) -> str:
|
||||||
|
# Remove fairplay entries
|
||||||
|
index = text.find('#PLUTO-DRM:ID="fairplay')
|
||||||
|
if index == -1:
|
||||||
|
return text
|
||||||
|
else:
|
||||||
|
end_of_previous_line = text.rfind("\n", 0, index)
|
||||||
|
if end_of_previous_line == -1:
|
||||||
|
return ""
|
||||||
|
else:
|
||||||
|
return text[:end_of_previous_line]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def bumpers(text: str) -> bool:
|
||||||
|
ads = (
|
||||||
|
"Pluto_TV_OandO",
|
||||||
|
"_ad",
|
||||||
|
"creative",
|
||||||
|
"Bumper",
|
||||||
|
"Promo",
|
||||||
|
"WarningCard",
|
||||||
|
)
|
||||||
|
|
||||||
|
return any(ad in text for ad in ads)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def year(data: dict) -> Optional[int]:
|
||||||
|
title_year = (int(match.group(1)) if (match := re.search(r"\((\d{4})\)", data.get("name", ""))) else None)
|
||||||
|
slug_year = (int(match.group(1)) if (match := re.search(r"\b(\d{4})\b", data.get("slug", ""))) else None)
|
||||||
|
return None if title_year else slug_year
|
||||||
|
|
||||||
7
services/PLUTO/config.yaml
Normal file
7
services/PLUTO/config.yaml
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
endpoints:
|
||||||
|
auth: https://boot.pluto.tv/v4/start
|
||||||
|
search: https://service-media-search.clusters.pluto.tv/v1/search
|
||||||
|
series: https://service-vod.clusters.pluto.tv/v3/vod/series/{season_id}/seasons
|
||||||
|
episodes: http://api.pluto.tv/v2/episodes/{episode_id}/clips.json
|
||||||
|
movie: https://service-vod.clusters.pluto.tv/v4/vod/items?ids={video_id}
|
||||||
|
license: https://service-concierge.clusters.pluto.tv/v1/wv/alt
|
||||||
292
services/PMTP/__init__.py
Normal file
292
services/PMTP/__init__.py
Normal file
@ -0,0 +1,292 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Tracks
|
||||||
|
from devine.core.utils.sslciphers import SSLCiphers
|
||||||
|
from devine.core.utils.xml import load_xml
|
||||||
|
|
||||||
|
|
||||||
|
class PMTP(Service):
|
||||||
|
"""
|
||||||
|
Service code for Paramount's Paramount+ streaming service (https://paramountplus.com).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: None for US | Credentials for INTL
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L3: 2160p, DDP5.1
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input should be complete URLs for both shows and movies:
|
||||||
|
https://www.paramountplus.com/movies/video/3vxCeaSHnqJLmatpwS1OzrkdA16h7sN9/
|
||||||
|
https://www.paramountplus.com/shows/special-ops-lioness/
|
||||||
|
- Use -r, --region to specify your region. If not used, the default is "us".
|
||||||
|
|
||||||
|
\b
|
||||||
|
Notes:
|
||||||
|
- Credentials are only required for INTL.
|
||||||
|
"""
|
||||||
|
|
||||||
|
ALIASES = ("paramountplus", "paramount+")
|
||||||
|
# GEOFENCE = ("us",)
|
||||||
|
TITLE_RE = r"https://www\.paramountplus\.com(/.*?)?/(?P<type>shows|movies)(/(?P<video>.*?))?/(?P<id>[a-zA-Z0-9_-]+)"
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="PMTP", short_help="https://paramountplus.com")
@click.argument("title", type=str, required=False)
@click.option("-r", "--region", default="us", help="Specify region (default: US)")
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: construct the service from the parsed CLI context.
    return PMTP(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title: str, region: str):
    # Region is case-insensitive on the CLI; normalized here.
    self.title = title
    self.region = region.lower()
    # Narrow the geofence to the chosen region (set before super().__init__,
    # preserving the original initialization order).
    self.GEOFENCE = (self.region,)
    super().__init__(ctx)

    # Region-specific endpoint table; unknown regions fall back to "intl".
    self.endpoints = self.config["endpoints"].get(self.region, self.config["endpoints"]["intl"])
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Yield search results for the input term."""
    params = {
        "term": self.title,
        "termCount": 50,
        "showCanVids": "true",
    }
    response = self._request("GET", "/apps-api/v3.1/androidphone/contentsearch/search.json", params=params)

    for item in response["terms"]:
        yield SearchResult(
            id_=item.get("path"),
            title=item.get("title"),
            description=None,
            label=item.get("term_type"),
            url=item.get("path"),
        )
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Log in for non-US regions; the US region works anonymously."""
    super().authenticate(cookies, credential)
    if self.region == "us":
        return

    if not credential:
        raise EnvironmentError("INTL requires Credentials for Authentication.")

    # NOTE(review): credentials are sent as URL query parameters, which can
    # end up in logs/proxies; confirm whether the endpoint accepts form data.
    self._request(
        "POST",
        "/apps-api/v2.0/androidphone/auth/login.json",
        params={
            "j_username": credential.username,
            "j_password": credential.password,
        },
    )

    status = self._request("GET", "/apps-api/v3.0/androidphone/login/status.json")
    if not status.get("isLoggedIn"):
        raise ConnectionError(" - Failed to authenticate user. Credentials may be invalid.")
|
||||||
|
|
||||||
|
def get_titles(self) -> Titles_T:
    """Parse the input URL into a Movie or Series title listing."""
    try:
        kind, video, title_id = (re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "video", "id"))
    except Exception:
        self.log.error("- Could not parse ID from title")
        sys.exit(1)

    if kind == "movies":
        return Movies(self._movie(title_id))

    # A /shows/.../<video>/<id> URL points at a single episode.
    if video:
        return Series(self._episode(title_id))

    if kind == "shows":
        return Series(self._show(title_id))

    # BUGFIX: the original fell through and implicitly returned None here;
    # fail loudly instead so the caller gets a useful message.
    raise ValueError(f"Unrecognized title type for {self.title!r}")
|
||||||
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
    """Open a playback session for the title and load its DASH tracks."""
    self.token, self.license = self.ls_session(title.id)
    mpd_url = self.get_manifest(title)
    return DASH.from_url(mpd_url, self.session).to_tracks(language=title.language)
|
||||||
|
|
||||||
|
def get_chapters(self, title: Episode) -> Chapters:
    """Return an end-credits chapter when the API reports one."""
    end_credits = title.data.get("playbackEvents", {}).get("endCreditChapterTimeMs")
    if not end_credits:
        return Chapters()
    return Chapters([Chapter(name="Credits", timestamp=end_credits)])
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, *, challenge: bytes, title: Title_T, track: AnyTrack):
    """Fetch the Widevine service certificate from the license server.

    BUGFIX: this method was named ``certificate``, which is not the hook
    name devine's DRM code invokes (``get_widevine_service_certificate``),
    so it was effectively dead code. The old name is kept as an alias for
    backward compatibility.
    """
    return self.session.post(self.license, data=challenge).content

# Backward-compatible alias for the original (mis)named method.
certificate = get_widevine_service_certificate
|
||||||
|
|
||||||
|
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
    """POST the challenge to the license server and return the raw license."""
    response = self.session.post(
        self.license,
        headers={"Authorization": f"Bearer {self.token}"},
        data=challenge,
    )
    if not response.ok:
        self.log.error(response.text)
        sys.exit(1)
    return response.content
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
|
||||||
|
def _movie(self, title: str) -> Movie:
|
||||||
|
res = self._request(
|
||||||
|
"GET", "/apps-api/v3.0/androidphone/movies/{}.json".format(title),
|
||||||
|
params={"includeContentInfo": "true"}
|
||||||
|
)
|
||||||
|
title = res["movie"]["movieContent"]
|
||||||
|
return [
|
||||||
|
Movie(
|
||||||
|
id_=title["contentId"],
|
||||||
|
name=title["title"],
|
||||||
|
year=title["_airDateISO"][:4],
|
||||||
|
language=res["movie"].get("locale", "en-US"),
|
||||||
|
service=self.__class__,
|
||||||
|
data=title,
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
def _show(self, title: str) -> Episode:
|
||||||
|
data = self._request("GET", "/apps-api/v3.0/androidphone/shows/slug/{}.json".format(title))
|
||||||
|
|
||||||
|
links = next((x.get("links") for x in data["showMenu"] if x.get("device_app_id") == "all_platforms"), None)
|
||||||
|
config = next((x.get("videoConfigUniqueName") for x in links if x.get("title").strip() == "Episodes"), None)
|
||||||
|
show = next((x for x in data["show"]["results"] if x.get("type") == "show"), None)
|
||||||
|
seasons = [x["seasonNum"] for x in data["available_video_seasons"]["itemList"] if x.get("seasonNum")]
|
||||||
|
locale = show.get("locale", "en-US")
|
||||||
|
|
||||||
|
show_data = self._request(
|
||||||
|
"GET",
|
||||||
|
"/apps-api/v2.0/androidphone/shows/{}/videos/config/{}.json".format(show.get("show_id"), config),
|
||||||
|
params={"platformType": "apps", "rows": "1", "begin": "0"},
|
||||||
|
)
|
||||||
|
|
||||||
|
section = next(
|
||||||
|
(x["sectionId"] for x in show_data["videoSectionMetadata"] if x["title"] == "Full Episodes"), None
|
||||||
|
)
|
||||||
|
|
||||||
|
episodes = []
|
||||||
|
for season in seasons:
|
||||||
|
res = self._request(
|
||||||
|
"GET",
|
||||||
|
"/apps-api/v2.0/androidphone/videos/section/{}.json".format(section),
|
||||||
|
params={"begin": "0", "rows": "999", "params": f"seasonNum={season}", "seasonNum": season},
|
||||||
|
)
|
||||||
|
episodes.extend(res["sectionItems"].get("itemList"))
|
||||||
|
|
||||||
|
return [
|
||||||
|
Episode(
|
||||||
|
id_=episode["contentId"],
|
||||||
|
title=episode["seriesTitle"],
|
||||||
|
season=episode["seasonNum"] if episode["fullEpisode"] else 0,
|
||||||
|
number=episode["episodeNum"] if episode["fullEpisode"] else episode["positionNum"],
|
||||||
|
name=episode["label"],
|
||||||
|
language=locale,
|
||||||
|
service=self.__class__,
|
||||||
|
data=episode,
|
||||||
|
)
|
||||||
|
for episode in episodes
|
||||||
|
if episode["fullEpisode"]
|
||||||
|
]
|
||||||
|
|
||||||
|
def _episode(self, title: str) -> Episode:
|
||||||
|
data = self._request("GET", "/apps-api/v2.0/androidphone/video/cid/{}.json".format(title))
|
||||||
|
|
||||||
|
return [
|
||||||
|
Episode(
|
||||||
|
id_=episode["contentId"],
|
||||||
|
title=episode["seriesTitle"],
|
||||||
|
season=episode["seasonNum"] if episode["fullEpisode"] else 0,
|
||||||
|
number=episode["episodeNum"] if episode["fullEpisode"] else episode["positionNum"],
|
||||||
|
name=episode["label"],
|
||||||
|
language=episode.get("locale", "en-US"),
|
||||||
|
service=self.__class__,
|
||||||
|
data=episode,
|
||||||
|
)
|
||||||
|
for episode in data["itemList"]
|
||||||
|
]
|
||||||
|
|
||||||
|
def ls_session(self, content_id: str) -> str:
|
||||||
|
res = self._request(
|
||||||
|
"GET",
|
||||||
|
"/apps-api/v3.1/androidphone/irdeto-control/anonymous-session-token.json",
|
||||||
|
params={"contentId": content_id},
|
||||||
|
)
|
||||||
|
|
||||||
|
return res.get("ls_session"), res.get("url")
|
||||||
|
|
||||||
|
def get_manifest(self, title: Episode) -> str:
|
||||||
|
try:
|
||||||
|
res = self._request(
|
||||||
|
"GET",
|
||||||
|
"http://link.theplatform.com/s/{}/media/guid/2198311517/{}".format(
|
||||||
|
title.data.get("cmsAccountId"), title.id
|
||||||
|
),
|
||||||
|
params={
|
||||||
|
"format": "SMIL",
|
||||||
|
"assetTypes": "|".join(self.config["assets"]),
|
||||||
|
"formats": "MPEG-DASH,MPEG4,M3U",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
body = load_xml(res).find("body").find("seq").findall("switch")
|
||||||
|
bitrate = max(body, key=lambda x: int(x.find("video").get("system-bitrate")))
|
||||||
|
videos = [x.get("src") for x in bitrate.findall("video")]
|
||||||
|
if not videos:
|
||||||
|
raise ValueError("Could not find any streams - is the title still available?")
|
||||||
|
|
||||||
|
manifest = next(
|
||||||
|
(x for x in videos if "hdr_dash" in x.lower()),
|
||||||
|
next((x for x in videos if "cenc_dash" in x.lower()), videos[0]),
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.log.warning("ThePlatform request failed: {}, falling back to streaming manifest".format(e))
|
||||||
|
if not title.data.get("streamingUrl"):
|
||||||
|
raise ValueError("Could not find any streams - is the title still available?")
|
||||||
|
|
||||||
|
manifest = title.data.get("streamingUrl")
|
||||||
|
|
||||||
|
return manifest
|
||||||
|
|
||||||
|
def _request(self, method: str, api: str, params: dict = None, headers: dict = None) -> Any[dict | str]:
|
||||||
|
url = urljoin(self.endpoints["base_url"], api)
|
||||||
|
self.session.headers.update(self.config["headers"])
|
||||||
|
self.session.params = {"at": self.endpoints["token"]}
|
||||||
|
#for prefix in ("https://", "http://"):
|
||||||
|
# self.session.mount(prefix, SSLCiphers(security_level=2))
|
||||||
|
|
||||||
|
if params:
|
||||||
|
self.session.params.update(params)
|
||||||
|
if headers:
|
||||||
|
self.session.headers.update(headers)
|
||||||
|
|
||||||
|
prep = self.session.prepare_request(Request(method, url))
|
||||||
|
|
||||||
|
response = self.session.send(prep)
|
||||||
|
if response.status_code != 200:
|
||||||
|
raise ConnectionError(f"{response.text}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
data = json.loads(response.content)
|
||||||
|
if not data.get("success"):
|
||||||
|
raise ValueError(data.get("message"))
|
||||||
|
return data
|
||||||
|
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
return response.text
|
||||||
19
services/PMTP/config.yaml
Normal file
19
services/PMTP/config.yaml
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
headers:
|
||||||
|
user-agent: Mozilla/5.0 (Linux; Android 13; SM-A536E) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Mobile Safari/537.36
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
us:
|
||||||
|
base_url: https://www.paramountplus.com
|
||||||
|
token: ABC+2JjrOUYWbaaqKmzwPdppq0RDB2WdufcFmIsSnJDmDEQpVgyAjQpqpEDksKZNMKQ=
|
||||||
|
intl:
|
||||||
|
base_url: https://www.intl.paramountplus.com
|
||||||
|
token: ABAS/G30Pp6tJuNOlZ1OEE6Rf5goS0KjICkGkBVIapVuxemiiASyWVfW4v7SUeAkogc=
|
||||||
|
|
||||||
|
assets:
|
||||||
|
- HLS_AES
|
||||||
|
- DASH_LIVE
|
||||||
|
- DASH_CENC
|
||||||
|
- DASH_CENC_HDR10
|
||||||
|
- DASH_TA
|
||||||
|
- DASH_CENC_PS4
|
||||||
149
services/RBOX/__init__.py
Normal file
149
services/RBOX/__init__.py
Normal file
File diff suppressed because one or more lines are too long
249
services/ROKU/__init__.py
Normal file
249
services/ROKU/__init__.py
Normal file
@ -0,0 +1,249 @@
|
|||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections.abc import Generator
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional
|
||||||
|
from urllib.parse import unquote, urlparse
|
||||||
|
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class ROKU(Service):
    """
    Service code for The Roku Channel (https://therokuchannel.roku.com)

    \b
    Author: stabbedbybrick
    Authorization: Cookies (optional)
    Robustness:
      Widevine:
        L3: 1080p, DD5.1

    \b
    Tips:
        - Use complete title/episode URL or id as input:
            https://therokuchannel.roku.com/details/e05fc677ab9c5d5e8332f123770697b9/paddington
            OR
            e05fc677ab9c5d5e8332f123770697b9
        - Supports movies, series, and single episodes
        - Search is geofenced
    """

    GEOFENCE = ("us",)
    TITLE_RE = r"^(?:https?://(?:www.)?therokuchannel.roku.com/(?:details|watch)/)?(?P<id>[a-z0-9-]+)"

    @staticmethod
    @click.command(name="ROKU", short_help="https://therokuchannel.roku.com", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx, **kwargs):
        return ROKU(ctx, **kwargs)

    def __init__(self, ctx, title):
        # Accept either a full details/watch URL or a bare content id
        self.title = re.match(self.TITLE_RE, title).group("id")
        super().__init__(ctx)

        # Widevine license server URL; populated per-title in get_tracks()
        self.license: str

    def authenticate(
        self,
        cookies: Optional[CookieJar] = None,
        credential: Optional[Credential] = None,
    ) -> None:
        """Optionally authenticate with cookies for account-gated content."""
        super().authenticate(cookies, credential)
        if cookies is not None:
            self.session.cookies.update(cookies)

    def search(self) -> Generator[SearchResult, None, None]:
        """Search the Roku Channel catalogue (endpoint is geofenced to US)."""
        token = self.session.get(self.config["endpoints"]["token"]).json()["csrf"]

        headers = {"csrf-token": token}
        payload = {"query": self.title}

        r = self.session.post(self.config["endpoints"]["search"], headers=headers, json=payload)
        r.raise_for_status()

        results = r.json()
        for result in results["view"]:
            # "zone" and "provider" rows are navigation entries, not titles
            if result["content"]["type"] not in ["zone", "provider"]:
                _id = result["content"].get("meta", {}).get("id")
                _desc = result["content"].get("descriptions", {})

                label = f'{result["content"].get("type")} ({result["content"].get("releaseYear")})'
                if result["content"].get("viewOptions"):
                    label += f' ({result["content"]["viewOptions"][0].get("priceDisplay")})'

                # slugify the title for the canonical details URL
                title = re.sub(r"^-|-$", "", re.sub(r"\W+", "-", result["content"].get("title").lower()))

                yield SearchResult(
                    id_=_id,
                    title=title,
                    description=_desc["250"]["text"] if _desc.get("250") else None,
                    label=label,
                    url=f"https://therokuchannel.roku.com/details/{_id}/{title}",
                )

    def get_titles(self) -> Titles_T:
        """Fetch content metadata and map it to Movies/Series; exits if unavailable."""
        data = self.session.get(self.config["endpoints"]["content"] + self.title).json()
        if not data["isAvailable"]:
            self.log.error("This title is temporarily unavailable or expired")
            sys.exit(1)

        if data["type"] in ["movie", "tvspecial"]:
            return Movies(
                [
                    Movie(
                        id_=data["meta"]["id"],
                        service=self.__class__,
                        name=data["title"],
                        year=data["releaseYear"],
                        language=data["viewOptions"][0]["media"].get("originalAudioLanguage", "en"),
                        data=None,
                    )
                ]
            )

        elif data["type"] == "series":
            episodes = self.fetch_episodes(data)
            return Series(
                [
                    Episode(
                        id_=episode["meta"]["id"],
                        service=self.__class__,
                        title=data["title"],
                        season=int(episode["seasonNumber"]),
                        number=int(episode["episodeNumber"]),
                        name=episode["title"],
                        year=data["releaseYear"],
                        language=episode["viewOptions"][0]["media"].get("originalAudioLanguage", "en"),
                        data=None,
                    )
                    for episode in episodes
                ]
            )

        elif data["type"] == "episode":
            return Series(
                [
                    Episode(
                        id_=data["meta"]["id"],
                        service=self.__class__,
                        title=data["title"],
                        season=int(data["seasonNumber"]),
                        number=int(data["episodeNumber"]),
                        name=data["title"],
                        year=data["releaseYear"],
                        language=data["viewOptions"][0]["media"].get("originalAudioLanguage", "en"),
                        data=None,
                    )
                ]
            )

    def get_tracks(self, title: Title_T) -> Tracks:
        """Request playback info, remember the license URL, and parse the DASH manifest."""
        token = self.session.get(self.config["endpoints"]["token"]).json()["csrf"]

        headers = {
            "csrf-token": token,
        }
        payload = {
            "rokuId": title.id,
            "mediaFormat": "mpeg-dash",
            "drmType": "widevine",
            "quality": "fhd",
            "providerId": "rokuavod",
        }

        r = self.session.post(
            self.config["endpoints"]["vod"],
            headers=headers,
            json=payload,
        )
        r.raise_for_status()

        videos = r.json()["playbackMedia"]["videos"]
        self.license = next(
            (
                x["drmParams"]["licenseServerURL"]
                for x in videos
                if x.get("drmParams") and x["drmParams"]["keySystem"] == "Widevine"
            ),
            None,
        )

        url = next((x["url"] for x in videos if x["streamFormat"] == "dash"), None)
        # Some manifest URLs are wrapped in a redirector; unwrap the origin URL
        if url and "origin" in urlparse(url).query:
            url = unquote(urlparse(url).query.split("=")[1]).split("?")[0]

        tracks = DASH.from_url(url=url).to_tracks(language=title.language)
        # Stash playback metadata on the video track for get_chapters()
        tracks.videos[0].data["playbackMedia"] = r.json()["playbackMedia"]

        for track in tracks.audio:
            label = track.data["dash"]["adaptation_set"].find("Label")
            if label is not None and "description" in label.text:
                track.descriptive = True

        for track in tracks.subtitles:
            label = track.data["dash"]["adaptation_set"].find("Label")
            if label is not None and "caption" in label.text:
                track.cc = True

        return tracks

    def get_chapters(self, title: Title_T) -> list[Chapter]:
        """Derive chapters from ad-break markers plus an optional credits cue point."""
        track = title.tracks.videos[0]

        chapters = []
        if track.data.get("playbackMedia", {}).get("adBreaks"):
            timestamps = sorted(track.data["playbackMedia"]["adBreaks"])
            chapters = [
                Chapter(name=f"Chapter {i + 1:02}", timestamp=ad.split(".")[0])
                for i, ad in enumerate(timestamps)
            ]

        if track.data.get("playbackMedia", {}).get("creditCuePoints"):
            start = next((
                x.get("start") for x in track.data["playbackMedia"]["creditCuePoints"] if x.get("start") != 0), None)
            if start:
                chapters.append(
                    Chapter(
                        name="Credits",
                        # cue point is in milliseconds; format as HH:MM:SS.mmm
                        timestamp=datetime.fromtimestamp((start / 1000), tz=timezone.utc).strftime("%H:%M:%S.%f")[:-3],
                    )
                )

        return chapters

    def get_widevine_service_certificate(self, **_: Any) -> str:
        # No service certificate is required for this license server
        return  # WidevineCdm.common_privacy_cert

    def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
        """POST the Widevine challenge to the per-title license server; exits on failure."""
        r = self.session.post(url=self.license, data=challenge)
        if r.status_code != 200:
            self.log.error(r.text)
            sys.exit(1)
        return r.content

    # service specific functions

    def fetch_episode(self, episode: dict) -> Optional[dict]:
        """
        Fetch full metadata for one episode; returns None on request failure.

        Fixes: the return was annotated ``-> json`` (the module object, not a
        type); the method actually returns a dict or None.
        """
        try:
            r = self.session.get(self.config["endpoints"]["content"] + episode["meta"]["id"])
            r.raise_for_status()
            return r.json()
        except requests.exceptions.RequestException as e:
            self.log.error(f"An error occurred while fetching episode {episode['meta']['id']}: {e}")
            return None

    def fetch_episodes(self, data: dict) -> list:
        """TODO: Switch to async once https proxies are fully supported"""
        with ThreadPoolExecutor(max_workers=10) as executor:
            tasks = list(executor.map(self.fetch_episode, data["episodes"]))
            # Drop episodes whose metadata request failed
            return [task for task in tasks if task is not None]
|
||||||
5
services/ROKU/config.yaml
Normal file
5
services/ROKU/config.yaml
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
endpoints:
|
||||||
|
content: https://therokuchannel.roku.com/api/v2/homescreen/content/https%3A%2F%2Fcontent.sr.roku.com%2Fcontent%2Fv1%2Froku-trc%2F
|
||||||
|
vod: https://therokuchannel.roku.com/api/v3/playback
|
||||||
|
token: https://therokuchannel.roku.com/api/v1/csrf
|
||||||
|
search: https://therokuchannel.roku.com/api/v1/search
|
||||||
284
services/RTE/__init__.py
Normal file
284
services/RTE/__init__.py
Normal file
@ -0,0 +1,284 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from collections.abc import Generator
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Tracks
|
||||||
|
from devine.core.utils.xml import load_xml
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
|
||||||
|
class RTE(Service):
    """
    \b
    Service code for RTE Player streaming service (https://www.rte.ie/player/).

    \b
    Author: stabbedbybrick
    Authorization: None
    Robustness:
      Widevine:
        L3: 1080p, AAC2.0

    \b
    Tips:
        - Input (pay attention to the URL format):
          SERIES: https://www.rte.ie/player/series/crossfire/10003928-00-0000
          EPISODE: https://www.rte.ie/player/series/crossfire/10003928-00-0000?epguid=AQ10003929-01-0001
          MOVIE: https://www.rte.ie/player/movie/glass/360230440380

    \b
    Notes:
        - Since some content is accessible worldwide, geofence is deactivated.
        - Using an IE IP-address is recommended to access everything.

    """

    # GEOFENCE = ("ie",)

    @staticmethod
    @click.command(name="RTE", short_help="https://www.rte.ie/player/", help=__doc__)
    @click.argument("title", type=str, required=False)
    @click.pass_context
    def cli(ctx, **kwargs) -> RTE:
        return RTE(ctx, **kwargs)

    def __init__(self, ctx, title):
        self.title = title
        super().__init__(ctx)

        self.base_url = self.config["endpoints"]["base_url"]
        self.feed = self.config["endpoints"]["feed"]
        self.license = self.config["endpoints"]["license"]

    def search(self) -> Generator[SearchResult, None, None]:
        """Search ThePlatform feed for series and movies."""
        params = {
            "byProgramType": "Series|Movie",
            "q": f"title:({self.title})",
            "range": "0-40",
            "schema": "2.15",
            "sort": "rte$rank|desc",
            "gzip": "true",
            "omitInvalidFields": "true",
        }
        results = self._request(f"{self.feed}/f/1uC-gC/rte-prd-prd-search", params=params)["entries"]

        for result in results:
            link = "https://www.rte.ie/player/{}/{}/{}"
            series = result.get("plprogram$programType").lower() == "series"
            _id = result.get("guid") if series else result.get("id").split("/")[-1]
            _title = result.get("title") if series else result.get("plprogram$longTitle")
            _type = result.get("plprogram$programType")

            # Fix: this was `_title.format(_type, _title, _id).lower()` — the
            # format args were never used (plain titles have no placeholders)
            # and a title containing `{`/`}` would crash or be mangled.
            title = _title.lower()
            title = re.sub(r"\W+", "-", title)
            title = re.sub(r"^-|-$", "", title)

            yield SearchResult(
                id_=link.format(_type, title, _id),
                title=_title,
                description=result.get("plprogram$shortDescription"),
                label=_type,
                url=link.format(_type, title, _id),
            )

    def get_titles(self) -> Titles_T:
        """Parse the player URL and dispatch to movie/series/episode lookups."""
        title_re = (
            r"https://www\.rte\.ie/player"
            r"/(?P<type>series|movie)"
            r"/(?P<slug>[a-zA-Z0-9_-]+)"
            r"/(?P<id>[a-zA-Z0-9_\-=?]+)/?$"
        )
        try:
            kind, _, title_id = (re.match(title_re, self.title).group(i) for i in ("type", "slug", "id"))
        except Exception:
            raise ValueError("- Could not parse ID from input")

        # An "?epguid=..." suffix selects a single episode within a series
        episode = title_id.split("=")[1] if "epguid" in title_id else None

        if episode:
            episode = self._episode(title_id, episode)
            return Series(episode)

        elif kind == "movie":
            movie = self._movie(title_id)
            return Movies(movie)

        elif kind == "series":
            episodes = self._show(title_id)
            return Series(episodes)

    def get_tracks(self, title: Title_T) -> Tracks:
        """Resolve the public media URL to a DASH manifest and parse tracks."""
        self.token, self.account = self.get_config()
        media = title.data["plprogramavailability$media"][0].get("plmedia$publicUrl")
        if not media:
            raise ValueError("Could not find any streams - is the title still available?")

        manifest, self.pid = self.get_manifest(media)
        tracks = DASH.from_url(manifest, self.session).to_tracks(language=title.language)
        for track in tracks.audio:
            role = track.data["dash"]["adaptation_set"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        return tracks

    def get_chapters(self, title: Episode) -> Chapters:
        """Build chapters from rte$chapters timecodes plus an optional credits marker."""
        if not title.data.get("rte$chapters"):
            return Chapters()

        timecodes = [x for x in title.data["rte$chapters"]]
        chapters = [Chapter(timestamp=float(x)) for x in timecodes]

        if title.data.get("rte$creditStart"):
            chapters.append(Chapter(name="Credits", timestamp=float(title.data["rte$creditStart"])))

        return chapters

    def certificate(self, **_):
        return None  # will use common privacy cert

    def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
        """Exchange a Widevine challenge via ThePlatform's ModularDrm endpoint."""
        params = {
            "token": self.token,
            "account": self.account,
            "form": "json",
            "schema": "1.0",
        }
        payload = {
            "getWidevineLicense": {
                "releasePid": self.pid,
                "widevineChallenge": base64.b64encode(challenge).decode("utf-8"),
            }
        }
        r = self.session.post(url=self.license, params=params, json=payload)
        if not r.ok:
            raise ConnectionError(f"License request failed: {r.text}")

        return r.json()["getWidevineLicenseResponse"]["license"]

    # Service specific functions

    def _movie(self, title: str) -> list[Movie]:
        """Fetch movie entries by id. Fixes: return was annotated ``-> Movie`` but a list is returned."""
        params = {"count": "true", "entries": "true", "byId": title}
        data = self._request("/mpx/1uC-gC/rte-prd-prd-all-programs", params=params)["entries"]

        return [
            Movie(
                id_=movie["guid"],
                service=self.__class__,
                name=movie.get("plprogram$longTitle"),
                year=movie.get("plprogram$year"),
                language=movie["plprogram$languages"][0] if movie.get("plprogram$languages") else "eng",
                data=movie,
            )
            for movie in data
        ]

    def _show(self, title: str) -> list[Episode]:
        """Fetch all episodes of a series by guid. Fixes: return annotation was ``-> Episode``."""
        entry = self._request("/mpx/1uC-gC/rte-prd-prd-all-movies-series?byGuid={}".format(title))["entries"][0]["id"]
        data = self._request("/mpx/1uC-gC/rte-prd-prd-all-programs?bySeriesId={}".format(entry.split("/")[-1]))["entries"]

        return [
            Episode(
                id_=episode.get("guid"),
                title=episode.get("plprogram$longTitle"),
                season=episode.get("plprogram$tvSeasonNumber") or 0,
                number=episode.get("plprogram$tvSeasonEpisodeNumber") or 0,
                name=episode.get("description"),
                language=episode["plprogram$languages"][0] if episode.get("plprogram$languages") else "eng",
                service=self.__class__,
                data=episode,
            )
            for episode in data
            if episode["plprogram$programType"] == "episode"
        ]

    def _episode(self, title: str, guid: str) -> list[Episode]:
        """Fetch a single episode of a series by guid. Fixes: return annotation was ``-> Episode``."""
        title = title.split("?")[0]
        entry = self._request("/mpx/1uC-gC/rte-prd-prd-all-movies-series?byGuid={}".format(title))["entries"][0]["id"]
        data = self._request("/mpx/1uC-gC/rte-prd-prd-all-programs?bySeriesId={}".format(entry.split("/")[-1]))["entries"]

        return [
            Episode(
                id_=episode.get("guid"),
                title=episode.get("plprogram$longTitle"),
                season=episode.get("plprogram$tvSeasonNumber") or 0,
                number=episode.get("plprogram$tvSeasonEpisodeNumber") or 0,
                name=episode.get("description"),
                language=episode["plprogram$languages"][0] if episode.get("plprogram$languages") else "eng",
                service=self.__class__,
                data=episode,
            )
            for episode in data
            if episode["plprogram$programType"] == "episode" and episode.get("guid") == guid
        ]

    def get_config(self):
        """Return (anonymous MPX token, MPX account id) from the public config endpoints."""
        token = self._request("/servicelayer/api/anonymouslogin")["mpx_token"]
        account = self._request("/wordpress/wp-content/uploads/standard/web/config.json")["mpx_config"]["account_id"]
        return token, account

    def get_manifest(self, media_url: str) -> tuple[str, str]:
        """
        Resolve a public media URL to its DASH manifest and release pid.

        Fixes: return was annotated ``-> str`` but a (manifest, pid) 2-tuple
        is returned (unpacked by get_tracks()).
        """
        try:
            res = self._request(
                media_url,
                params={
                    "formats": "MPEG-DASH",
                    "auth": self.token,
                    "assetTypes": "default:isl",
                    "tracking": "true",
                    "format": "SMIL",
                    "iu": "/3014/RTE_Player_VOD/Android_Phone/NotRegistered",
                    "policy": "168602703",
                },
            )

            root = load_xml(res)
            video = root.xpath("//switch/video")
            manifest = video[0].get("src")

            # The release pid needed for license requests is buried in the
            # SMIL trackingData parameter as "pid=<value>|..."
            elem = root.xpath("//switch/ref")
            value = elem[0].find(".//param[@name='trackingData']").get("value")
            pid = re.search(r"pid=([^|]+)", value).group(1)

            return manifest, pid

        except Exception as e:
            raise ValueError(
                f"Request for manifest failed: {e}.\n"
                "Content may be geo-restricted to IE"
            )

    def _request(self, api: str, params: Optional[dict] = None, headers: Optional[dict] = None) -> Union[dict, str]:
        """
        Send a GET request and return parsed JSON (or raw text when not JSON).

        Fixes: return was annotated ``Any[dict | str]`` — ``typing.Any`` is
        not subscriptable at runtime.
        """
        url = urljoin(self.base_url, api)
        self.session.headers.update(self.config["headers"])

        if params:
            self.session.params.update(params)
        if headers:
            self.session.headers.update(headers)

        prep = self.session.prepare_request(Request("GET", url))

        response = self.session.send(prep)
        if response.status_code != 200:
            raise ConnectionError(
                f"Status: {response.status_code} - {response.url}\n"
                "Content may be geo-restricted to IE"
            )

        try:
            return json.loads(response.content)
        except json.JSONDecodeError:
            return response.text
|
||||||
7
services/RTE/config.yaml
Normal file
7
services/RTE/config.yaml
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
headers:
|
||||||
|
user-agent: Dalvik/2.1.0 (Linux; U; Android 13; SM-A536E Build/RSR1.210722.013.A2)
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
base_url: https://www.rte.ie
|
||||||
|
feed: https://feed.entertainment.tv.theplatform.eu
|
||||||
|
license: https://widevine.entitlement.eu.theplatform.com/wv/web/ModularDrm
|
||||||
256
services/SPOT/__init__.py
Normal file
256
services/SPOT/__init__.py
Normal file
@ -0,0 +1,256 @@
|
|||||||
|
import base64
|
||||||
|
import re
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Optional, Union
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.titles import Song, Album, Title_T
|
||||||
|
from devine.core.tracks import Audio, Tracks
|
||||||
|
from devine.core.drm import DRM_T, Widevine
|
||||||
|
from devine.utils import base62
|
||||||
|
from pywidevine.pssh import PSSH
|
||||||
|
|
||||||
|
class SPOT(Service):
|
||||||
|
"""
|
||||||
|
Service code for Spotify
|
||||||
|
Written by ToonsHub
|
||||||
|
|
||||||
|
Reference: https://github.com/glomatico/spotify-aac-downloader
|
||||||
|
|
||||||
|
Authorization: Cookies (Free - 128kbps and Premium - 256kbps)
|
||||||
|
Security: AAC@L3
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Static method, this method belongs to the class
|
||||||
|
@staticmethod
|
||||||
|
|
||||||
|
# The command name, must much the service tag (and by extension the service folder)
|
||||||
|
@click.command(name="SPOT", short_help="https://open.spotify.com", help=__doc__)
|
||||||
|
|
||||||
|
# Using track/playlist/album/artist page URL
|
||||||
|
@click.argument("title", type=str)
|
||||||
|
|
||||||
|
# Pass the context back to the CLI with arguments
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx, **kwargs):
|
||||||
|
return SPOT(ctx, **kwargs)
|
||||||
|
|
||||||
|
# Accept the CLI arguments by overriding the constructor (The __init__() method)
|
||||||
|
def __init__(self, ctx, title):
|
||||||
|
|
||||||
|
# Pass the title argument to self so it's accessable across all methods
|
||||||
|
self.title = title
|
||||||
|
self.is_premium = False
|
||||||
|
|
||||||
|
# Overriding the constructor
|
||||||
|
super().__init__(ctx)
|
||||||
|
|
||||||
|
|
||||||
|
# Defining an authinticate function
|
||||||
|
def authenticate(self, cookies: Optional[CookieJar], credential: Optional[Credential] = None):
|
||||||
|
|
||||||
|
# Check for cookies
|
||||||
|
if not cookies:
|
||||||
|
raise Exception("Cookies are required for performing this action.")
|
||||||
|
|
||||||
|
# Authenticate using Cookies
|
||||||
|
self.session.headers.update(
|
||||||
|
{
|
||||||
|
'accept': 'application/json',
|
||||||
|
'accept-language': 'en',
|
||||||
|
"app-platform": "WebPlayer",
|
||||||
|
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
self.session.cookies.update(cookies)
|
||||||
|
home_page = self.session.get("https://open.spotify.com/").text
|
||||||
|
token = re.search(r'accessToken":"(.*?)"', home_page).group(1)
|
||||||
|
self.is_premium = re.search(r'isPremium":(.*?),', home_page).group(1) == 'true'
|
||||||
|
self.session.headers.update(
|
||||||
|
{
|
||||||
|
"authorization": f"Bearer {token}",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Determine what kind of collection the input URL points at.
def getCollectionTypeAndId(self):
    """Parse the input URL into its collection type and Spotify ID.

    e.g. "https://open.spotify.com/track/abc?si=x" -> ("track", "abc")
    """
    after_host = self.title.split("open.spotify.com/")[1]
    collection_type = after_host.split("/")[0]
    # Strip any query string (e.g. "?si=...") from the ID.
    collection_id = self.title.split(collection_type + "/")[1].split("?")[0]
    return collection_type, collection_id
|
||||||
|
|
||||||
|
def get_titles(self):
    """Fetch metadata for the input URL and map it to Song objects in an Album.

    Supports album, playlist, artist (top tracks) and single-track URLs.
    An unrecognised collection type yields an empty Album, matching the
    previous behavior.
    """
    def _song_from(track_meta, album_meta=None):
        # Build a Song from Spotify track metadata. album_meta supplies the
        # album info when iterating an album page, whose track items carry
        # no "album" key of their own.
        album_info = album_meta if album_meta is not None else track_meta["album"]
        return Song(
            id_=track_meta["id"],
            name=track_meta["name"],
            artist=", ".join(artist["name"] for artist in track_meta["artists"]),
            album=album_info["name"],
            track=track_meta["track_number"],
            disc=track_meta["disc_number"],
            year=int(album_info["release_date"][:4].strip()),
            service=self.__class__,
        )

    def _paginate(page):
        # Follow Spotify's "next" links in place until the collection is
        # exhausted; previously this loop was duplicated per branch.
        next_url = page["next"]
        while next_url is not None:
            next_page = self.session.get(next_url).json()
            page["items"].extend(next_page["items"])
            next_url = next_page["next"]

    songs = []
    _type, _id = self.getCollectionTypeAndId()

    if _type == 'album':
        album = self.session.get(self.config['endpoints']['albums'].format(id=_id)).json()
        _paginate(album["tracks"])
        songs = [_song_from(item, album_meta=album) for item in album["tracks"]["items"]]

    elif _type == "playlist":
        playlist = self.session.get(
            self.config['endpoints']['playlists'].format(id=_id)
        ).json()
        _paginate(playlist["tracks"])
        # NOTE(review): item["track"] can be None for removed/unavailable
        # playlist entries in the public API - confirm before relying on it.
        songs = [_song_from(item["track"]) for item in playlist["tracks"]["items"]]

    elif _type == "artist":
        # The artists endpoint returns an artist's top tracks, not a
        # paginated collection.
        top_tracks = self.session.get(
            self.config['endpoints']['artists'].format(id=_id)
        ).json()
        songs = [_song_from(track) for track in top_tracks["tracks"]]

    elif _type == "track":
        track = self.session.get(
            self.config['endpoints']['tracks'].format(id=_id)
        ).json()
        songs = [_song_from(track)]

    return Album(songs)
|
||||||
|
|
||||||
|
# Get DRM
def get_spotify_drm(self) -> DRM_T:
    """Build the Widevine DRM object for the current file ID.

    The PSSH is fetched from Spotify's seektable endpoint.
    """
    # Use the service session (previously a bare requests.get) so shared
    # headers, cookies and any configured proxy apply consistently.
    pssh = self.session.get(
        self.config['endpoints']['pssh'].format(file_id=self.file_id)
    ).json()["pssh"]
    return Widevine(
        pssh=PSSH(pssh)
    )
|
||||||
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
    """Resolve the audio stream for a title and return it as a Tracks object.

    Returns None when Spotify exposes no playable files for the track
    (matching the original behavior).

    Raises:
        ValueError: if no file matches the account's audio quality.
    """
    # Premium accounts get the 256kbps stream, free accounts 128kbps.
    self.audio_quality = "MP4_256_DUAL" if self.is_premium else "MP4_128_DUAL"

    # The metadata service is keyed by the GID: the base62 track ID decoded
    # to a zero-padded 32-character hex string.
    gid = hex(base62.decode(title.id, base62.CHARSET_INVERTED))[2:].zfill(32)
    metadata = self.session.get(
        self.config['endpoints']['metadata'].format(gid=gid)
    ).json()
    audio_files = metadata.get("file")
    if audio_files is None:
        # Some tracks only expose playable files via an "alternative" entry.
        if metadata.get("alternative") is not None:
            audio_files = metadata["alternative"][0]["file"]
        else:
            return None
    self.file_id = next(
        (i["file_id"] for i in audio_files if i["format"] == self.audio_quality),
        None,
    )
    if self.file_id is None:
        # Previously this raised a bare StopIteration; fail with context instead.
        raise ValueError(f"No audio file found in format {self.audio_quality}")

    # Resolve the CDN URL for the chosen file.
    stream_url = self.session.get(
        self.config['endpoints']['stream'].format(file_id=self.file_id)
    ).json()["cdnurl"][0]  # Can change index to get different server

    # Get & Set DRM
    drm = [self.get_spotify_drm()]

    tracks = Tracks()
    tracks.add(Audio(
        url=stream_url,
        drm=drm,
        codec=Audio.Codec.AAC,
        language=metadata.get("language_of_performance", ["en"])[0],
        bitrate=256000 if self.is_premium else 128000,
        channels=2
    ))

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title):
    """Spotify audio has no chapter markers; always return an empty list."""
    return []
|
||||||
|
|
||||||
|
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
    """POST the Widevine challenge to Spotify's license server.

    Returns the license response as a base64-encoded string.
    """
    response = self.session.post(
        self.config['endpoints']['license'],
        data=challenge,
    )
    return base64.b64encode(response.content).decode()
|
||||||
9
services/SPOT/config.yaml
Normal file
9
services/SPOT/config.yaml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
# API endpoints used by the SPOT service.
endpoints:
  # Public Web API collection lookups (require the scraped Bearer token).
  albums: https://api.spotify.com/v1/albums/{id}
  playlists: https://api.spotify.com/v1/playlists/{id}
  # Returns an artist's top tracks rather than their full catalogue.
  artists: https://api.spotify.com/v1/artists/{id}/top-tracks
  tracks: https://api.spotify.com/v1/tracks/{id}
  # Seektable JSON; also carries the Widevine PSSH for a file.
  pssh: https://seektables.scdn.co/seektable/{file_id}.json
  # Internal client (spclient) endpoints for track metadata, CDN resolution
  # and Widevine licensing.
  metadata: https://spclient.wg.spotify.com/metadata/4/track/{gid}?market=from_token
  stream: https://gue1-spclient.spotify.com/storage-resolve/v2/files/audio/interactive/11/{file_id}?version=10000000&product=9&platform=39&alt=json
  license: https://gae2-spclient.spotify.com/widevine-license/v1/audio/license
|
||||||
230
services/STV/__init__.py
Normal file
230
services/STV/__init__.py
Normal file
@ -0,0 +1,230 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from collections.abc import Generator
|
||||||
|
from datetime import timedelta
|
||||||
|
from typing import Any, Union
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from devine.core.manifests.dash import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Tracks
|
||||||
|
from lxml import etree
|
||||||
|
|
||||||
|
|
||||||
|
class STV(Service):
    """
    Service code for STV Player streaming service (https://player.stv.tv/).

    \b
    Author: stabbedbybrick
    Authorization: None
    Robustness:
      L3: 1080p

    \b
    Tips:
        - Use complete title URL as input:
          SERIES: https://player.stv.tv/summary/rebus
          EPISODE: https://player.stv.tv/episode/2ro8/rebus
        - Use the episode URL for movies:
          MOVIE: https://player.stv.tv/episode/4lw7/wonder-woman-1984

    """

    # STV is only available inside the UK.
    GEOFENCE = ("gb",)
    ALIASES = ("stvplayer",)

    @staticmethod
    @click.command(name="STV", short_help="https://player.stv.tv/", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> STV:
        # Click entry point: forward the CLI context and arguments to the service.
        return STV(ctx, **kwargs)

    def __init__(self, ctx: Context, title: str):
        # Title URL; parsed later by parse_title().
        self.title = title
        super().__init__(ctx)

        # Mimic the Android app's HTTP client.
        self.session.headers.update({"user-agent": "okhttp/4.11.0"})
        self.base = self.config["endpoints"]["base"]

    def search(self) -> Generator[SearchResult, None, None]:
        """Search STV's Swiftype index and yield matching titles."""
        data = {
            "engine_key": "S1jgssBHdk8ZtMWngK_y",
            "q": self.title,
        }
        r = self.session.post(self.config["endpoints"]["search"], data=data)
        r.raise_for_status()
        results = r.json()["records"]["page"]

        for result in results:
            # "category" may be a list; use its first entry as the label.
            label = result.get("category")
            if label and isinstance(label, list):
                label = result["category"][0]

            yield SearchResult(
                id_=result.get("url"),
                title=result.get("title"),
                description=result.get("body"),
                label=label,
                url=result.get("url"),
            )

    def get_titles(self) -> Union[Movies, Series]:
        """Resolve the input URL to a Movies or Series collection.

        An "episode" URL maps to a single episode (or a Movie when its genre
        is "movie"); a "summary" URL maps to the full series.
        """
        kind, slug = self.parse_title(self.title)
        # Ask the API for DRM-aware metadata (includes playable video IDs).
        self.session.headers.update({"stv-drm": "true"})

        if kind == "episode":
            r = self.session.get(self.base + f"episodes/{slug}")
            r.raise_for_status()
            episode = r.json()["results"]

            # NOTE(review): .get("genre").lower() raises AttributeError if
            # "genre" is ever absent - presumably always present; confirm.
            if episode.get("genre").lower() == "movie":
                # NOTE(review): this early return skips the header pop below,
                # leaving "stv-drm" set on the session for later requests.
                return Movies(
                    [
                        Movie(
                            id_=episode["video"].get("id"),
                            service=self.__class__,
                            year=None,
                            name=episode.get("title"),
                            language="en",
                            data=episode,
                        )
                    ]
                )

            episodes = [
                Episode(
                    id_=episode["video"].get("id"),
                    service=self.__class__,
                    title=episode["programme"].get("name"),
                    # "Series N" -> season N; anything else becomes season 0.
                    season=int(episode["playerSeries"]["name"].split(" ")[1])
                    if episode.get("playerSeries") and re.match(r"Series \d+", episode["playerSeries"]["name"])
                    else 0,
                    number=int(episode.get("number", 0)),
                    name=episode.get("title"),
                    language="en",
                    data=episode,
                )
            ]

        elif kind == "summary":
            r = self.session.get(self.base + f"programmes/{slug}")
            r.raise_for_status()
            data = r.json()

            # One request per season GUID to collect every episode.
            series = [series.get("guid") for series in data["results"]["series"]]
            seasons = [self.session.get(self.base + f"episodes?series.guid={i}").json() for i in series]

            episodes = [
                Episode(
                    id_=episode["video"].get("id"),
                    service=self.__class__,
                    title=data["results"].get("name"),
                    # Same "Series N" -> season N mapping as the episode path.
                    season=int(episode["playerSeries"]["name"].split(" ")[1])
                    if episode.get("playerSeries") and re.match(r"Series \d+", episode["playerSeries"]["name"])
                    else 0,
                    number=int(episode.get("number", 0)),
                    name=episode.get("title"),
                    language="en",
                    data=episode,
                )
                for season in seasons
                for episode in season["results"]
            ]

        # Restore the session for subsequent (non-metadata) requests.
        self.session.headers.pop("stv-drm")
        return Series(episodes)

    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """Fetch the Brightcove playback data for a title and return its tracks."""
        # DRM titles live under a different Brightcove account with a
        # different playback policy (see config.yaml).
        self.drm = title.data["programme"].get("drmEnabled")
        headers = self.config["headers"]["drm"] if self.drm else self.config["headers"]["clear"]
        accounts = self.config["accounts"]["drm"] if self.drm else self.config["accounts"]["clear"]

        r = self.session.get(
            self.config["endpoints"]["playback"].format(accounts=accounts, id=title.id),
            headers=headers,
        )
        if not r.ok:
            raise ConnectionError(r.text)
        data = r.json()

        # Prefer the DASH source over any other stream type.
        source_manifest = next(
            (source["src"] for source in data["sources"] if source.get("type") == "application/dash+xml"),
            None,
        )

        self.license = None
        if self.drm:
            # Find a DASH source that advertises a Widevine key system.
            key_systems = next((
                source
                for source in data["sources"]
                if source.get("type") == "application/dash+xml"
                and source.get("key_systems").get("com.widevine.alpha")),
                None,
            )

            self.license = key_systems["key_systems"]["com.widevine.alpha"]["license_url"] if key_systems else None

        # The final segment 404s, so shorten the manifest (see trim_duration).
        manifest = self.trim_duration(source_manifest)
        tracks = DASH.from_text(manifest, source_manifest).to_tracks(title.language)

        for track in tracks.audio:
            # Mark audio-description/alternate audio tracks as descriptive.
            role = track.data["dash"]["representation"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        return tracks

    def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
        """Map the title's "_cuePoints" metadata to Chapter objects (may be empty)."""
        cue_points = title.data.get("_cuePoints")
        if not cue_points:
            return Chapters()

        return Chapters([Chapter(timestamp=int(cue)) for cue in cue_points])

    def get_widevine_service_certificate(self, **_: Any) -> str:
        # STV's license server does not require a service certificate.
        return None

    def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
        """POST the Widevine challenge to the license URL captured in get_tracks()."""
        if not self.license:
            return None

        r = self.session.post(url=self.license, data=challenge)
        if r.status_code != 200:
            raise ConnectionError(r.text)
        return r.content

    # Service specific functions

    @staticmethod
    def parse_title(title: str) -> tuple[str, str]:
        """Split a player.stv.tv URL into its kind ("episode"/"summary") and slug.

        Raises:
            ValueError: if the URL path is not an episode or summary page.
        """
        parsed_url = urlparse(title).path.split("/")
        kind, slug = parsed_url[1], parsed_url[2]
        if kind not in ["episode", "summary"]:
            raise ValueError("Failed to parse title - is the URL correct?")

        return kind, slug

    @staticmethod
    def trim_duration(source_manifest: str) -> str:
        """
        The last segment on all tracks return a 404 for some reason, causing a failed download.
        So we trim the duration by exactly one segment to account for that.

        TODO: Calculate the segment duration instead of assuming length.
        """
        manifest = DASH.from_url(source_manifest).manifest
        period_duration = manifest.get("mediaPresentationDuration")
        period_duration = DASH.pt_to_sec(period_duration)

        # Assumes 6-second segments; rebuild the ISO-8601 duration string.
        hours, minutes, seconds = str(timedelta(seconds=period_duration - 6)).split(":")
        new_duration = f"PT{hours}H{minutes}M{seconds}S"
        manifest.set("mediaPresentationDuration", new_duration)

        return etree.tostring(manifest, encoding="unicode")
|
||||||
20
services/STV/config.yaml
Normal file
20
services/STV/config.yaml
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
# Brightcove account IDs: DRM-protected and clear streams are served from
# different accounts.
accounts:
  drm: "6204867266001"
  clear: "1486976045"

# Request headers for Brightcove playback calls; BCOV-POLICY selects the
# playback policy matching the account used.
headers:
  drm:
    BCOV-POLICY: BCpkADawqM32Q7lZg8ME0ydIOV8bD_9Ke2YD5wvY_T2Rq2TBtz6QQfpHtSAJTiDL-MiYAxyJVvScaKt82d1Q6b_wP6MG-O8SGQjRnwczfdsTesTZy-uj23uKv1vjHijtTeQC0DONN53zS38v
    User-Agent: Dalvik/2.1.0 (Linux; U; Android 12; SM-A226B Build/SP1A.210812.016)
    Host: edge.api.brightcove.com
    Connection: keep-alive
  clear:
    BCOV-POLICY: BCpkADawqM2Dpx-ht5hP1rQqWFTcOTqTT5x5bSUlY8FaOO1_P8LcKxmL2wrFzTvRb3HzO2YTIzVDuoeLfqvFvp1dWRPnxKT8zt9ErkENYteaU9T6lz7OogjL8W8
    User-Agent: Dalvik/2.1.0 (Linux; U; Android 12; SM-A226B Build/SP1A.210812.016)
    Host: edge.api.brightcove.com
    Connection: keep-alive

# STV metadata API, Brightcove playback API, and Swiftype search engine.
endpoints:
  base: https://player.api.stv.tv/v1/
  playback: https://edge.api.brightcove.com/playback/v1/accounts/{accounts}/videos/{id}
  search: https://api.swiftype.com/api/v1/public/engines/search.json
|
||||||
454
services/TFC/__init__.py
Normal file
454
services/TFC/__init__.py
Normal file
@ -0,0 +1,454 @@
|
|||||||
|
import re
|
||||||
|
import time
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Union, Generator, Optional
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from pathlib import Path
|
||||||
|
import json
|
||||||
|
|
||||||
|
import click
|
||||||
|
import requests
|
||||||
|
from langcodes import Language
|
||||||
|
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Tracks, Chapters, Subtitle, Chapter
|
||||||
|
from devine.core.tracks.attachment import Attachment
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.downloaders import curl_impersonate, n_m3u8dl_re
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
import warnings

# Silence the "chunk_size is ignored" warning emitted during streamed downloads.
warnings.filterwarnings("ignore", message="chunk_size is ignored")
|
||||||
|
|
||||||
|
|
||||||
|
class TFC(Service):
|
||||||
|
"""
|
||||||
|
Service code for iWantTFC
|
||||||
|
Written by @sp4rk.y
|
||||||
|
|
||||||
|
Authorization: Cookies (Free and Paid Titles)
|
||||||
|
Security: FHD@L3
|
||||||
|
"""
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="TFC", short_help="https://www.iwanttfc.com", help=__doc__)
@click.argument("title", type=str)
@click.option("-m", "--movie", is_flag=True, default=False, help="Title is a Movie.")
@click.pass_context
def cli(ctx, **kwargs):
    """Click entry point: hand the parsed CLI arguments to the TFC service."""
    return TFC(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title: str, movie: bool):
    """Store the CLI arguments; authentication state is filled in by authenticate()."""
    self.title = title
    self.is_movie = movie

    # Authentication state, populated by authenticate().
    self.credential = None
    self.token = None
    self.refresh_token = None
    self.token_expiry = None

    super().__init__(ctx)
|
||||||
|
|
||||||
|
def authenticate(
    self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None
) -> Optional[str]:
    """Log in to TFC (or reuse a cached token) and return the auth token.

    A valid cached token short-circuits the whole flow. Otherwise the
    multi-step login dance is performed: auth -> login -> auth again ->
    rights check, and the resulting token is cached to disk.
    """
    if self.credential is None and credential:
        self.credential = credential

    # Check for cached token
    cache_file = Path(config.directories.cache / self.__class__.__name__ / "token.json")

    # Check if session cache exists and is still valid
    if cache_file.exists():
        try:
            with open(cache_file, "r", encoding="utf-8") as f:
                session_data = json.load(f)
                if session_data.get("token_expiry", 0) > time.time():
                    self.log.info(" + Using cached authentication token...")
                    self.token = session_data["token"]
                    self.refresh_token = session_data.get("refresh_token")
                    self.token_expiry = session_data["token_expiry"]
                    self.session.headers.update({"Authorization": f"Bearer {self.token}"})

                    # If there are cookies stored, restore them
                    if "cookies" in session_data:
                        for cookie_name, cookie_value in session_data["cookies"].items():
                            self.session.cookies.set(cookie_name, cookie_value)

                    return self.token
                else:
                    self.log.info(" + Token expired, re-authenticating...")
        except (json.JSONDecodeError, KeyError) as e:
            # A corrupt cache is non-fatal; fall through to a fresh login.
            self.log.warning(f" + Error reading cache: {e}")

    self.log.info(" + Authenticating with TFC...")
    # Browser-like headers; the API rejects requests without proper Origin/Referer.
    base_headers = {
        "User-Agent": self.config["browser"]["headers"]["user-agent"],
        "Content-Type": "application/x-www-form-urlencoded",
        "Accept": "application/json, text/plain, */*",
        "Origin": "https://www.iwanttfc.com",
        "Referer": "https://www.iwanttfc.com/",
    }
    self.session.headers.update(base_headers)
    data_auth = (
        f"password={self.credential.password}&email={self.credential.username}&deviceID={self.config['UUID']}"
    )
    # Step 1: initial auth gives a provisional UserAuthentication token.
    r1 = self.session.post(self.config["endpoints"]["api_login"], data=data_auth)
    r1.raise_for_status()
    j1 = r1.json()
    user_auth_1 = j1.get("UserAuthentication")
    if not user_auth_1:
        raise ValueError("No UserAuthentication from /user/auth")

    # Step 2: login with the provisional token.
    login_url = self.config["endpoints"].get("user_login", "https://www.iwanttfc.com/api/1.0/user/login")
    r2 = self.session.post(login_url, headers={"UserAuthentication": user_auth_1})
    r2.raise_for_status()

    # Step 3: re-auth to obtain the final session token.
    # NOTE(review): this second auth call mirrors the site's own login flow;
    # presumably required to upgrade the provisional token - confirm.
    r3 = self.session.post(
        self.config["endpoints"]["api_login"], data=data_auth, headers={"UserAuthentication": user_auth_1}
    )
    r3.raise_for_status()
    j3 = r3.json()
    user_auth_2 = j3.get("UserAuthentication")
    if not user_auth_2:
        raise ValueError("No UserAuthentication from second /user/auth")

    # Step 4: rights check validates the token (result itself is unused).
    user_rights_url = self.config["endpoints"].get("user_rights", "https://www.iwanttfc.com/api/1.0/user/rights")
    r4 = self.session.get(user_rights_url, headers={"UserAuthentication": user_auth_2})
    r4.raise_for_status()

    self.token = user_auth_2
    self.refresh_token = j1.get("refreshToken", None)
    # TFC tokens typically expire in 24 hours (86400 seconds), but we'll set it to just under 24 hours to be safe
    self.token_expiry = time.time() + 86300
    self.session.headers.update({"Authorization": f"Bearer {self.token}"})

    # Cache the token and cookies
    try:
        cache_file.parent.mkdir(parents=True, exist_ok=True)
        with open(cache_file, "w", encoding="utf-8") as f:
            json.dump(
                {
                    "token": self.token,
                    "refresh_token": self.refresh_token,
                    "token_expiry": self.token_expiry,
                    "cookies": {k: v for k, v in self.session.cookies.items()},
                },
                f,
            )
        self.log.info(" + Authentication token and cookies cached successfully")
    except Exception as e:
        # Caching is best-effort; the live session still works without it.
        self.log.warning(f" + Failed to cache token: {e}")

    return self.token
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Search TFC's Algolia indices and yield enriched SearchResults.

    Each hit triggers an extra detail request so the label can include
    type, year and episode count. Hits whose details fail to load are
    skipped with a warning.
    """
    query = self.title
    headers = {**self.config["search"]["headers"], "Authorization": f"Bearer {self.token}"}
    # Query both the items index and the cast-tag index in one batch request.
    data = {
        "requests": [
            {"query": query, "indexName": "www_iwanttfc_com_items", "params": "hitsPerPage=200"},
            {"query": query, "indexName": "www_iwanttfc_com_tag_id_cast", "params": "hitsPerPage=200"},
        ]
    }

    response = self.session.post(self.config["endpoints"]["api_search"], headers=headers, json=data)
    response.raise_for_status()
    results = response.json()["results"]

    # Only the items index (results[0]) is surfaced; cast hits are ignored.
    for result in results[0]["hits"]:
        title = result.get("title", {}).get("en", "")
        if not title:
            continue

        try:
            # The catalog build ID is needed to construct detail URLs.
            js_value = self.get_js_value()
            if not js_value:
                self.log.warning("Could not get JS value for detail URL")
                continue

            # Use the direct item ID instead of objectID for detail lookup
            item_id = result.get("id") or result["objectID"]
            detail_url = self.config["endpoints"]["api_playback"].format(js=js_value, id=item_id)

            detail_response = self.session.get(
                detail_url,
                headers={
                    "Authorization": f"Bearer {self.token}",
                    "Accept": "application/json",
                    "Origin": "https://www.iwanttfc.com",
                    "Referer": "https://www.iwanttfc.com/",
                },
            )

            detail_response.raise_for_status()
            detail_data = detail_response.json()

            # Truncate long synopses for display.
            description = detail_data.get("description", {}).get("en", "")
            if description:
                description = description[:200] + "..."

            # Titles with "children" are series; everything else is a movie.
            media_type = "TV" if "children" in detail_data else "Movie"
            year = detail_data.get("release_year")
            episode_count = 0

            if media_type == "TV":
                # Exclude "-tlr" (trailer) entries from the episode count.
                episode_count = len(
                    [episode for episode in detail_data.get("children", []) if "-tlr" not in episode["id"]]
                )

            label = media_type
            if year:
                label += f" ({year})"
            if media_type == "TV":
                label += f" {episode_count} Episode{'' if episode_count == 1 else 's'}"

            yield SearchResult(id_=item_id, title=title, description=description, label=label)

        except requests.RequestException as e:
            self.log.warning(f"Failed to get details for {result.get('id', result['objectID'])}: {e}")
            continue
|
||||||
|
|
||||||
|
def get_js_value(self) -> Optional[str]:
    """Scrape the catalog build token ("js value") from the browse page.

    The site embeds its catalog version in a static script URL; that token
    is required to build detail/playback API URLs. Returns None when the
    token cannot be found.
    """
    # curl_impersonate is a generator-based downloader; drain it so the
    # page is actually written to disk.
    download = curl_impersonate(
        urls="https://www.iwanttfc.com/#!/browse",
        output_dir=config.directories.temp,
        filename="browse_page.html",
    )
    for _ in download:
        pass

    page_path = config.directories.temp / "browse_page.html"
    page = page_path.read_text(encoding="utf8")

    found = re.search(r'src="https://absprod-static.iwanttfc.com/c/6/catalog/(.*?)/script.js', page)
    return found.group(1) if found else None
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """Fetch the input title's metadata and wrap it as Movies or Series.

    A title with a "children" list is treated as a series; anything else
    is treated as a single movie.
    """
    headers = self.config["browser"]["headers"]
    metadata_url = self.config["endpoints"]["api_playback"].format(js=self.get_js_value(), id=self.title)
    try:
        title_metadata = requests.get(metadata_url, headers=headers).json()
    except ValueError:
        # A non-JSON response means the title slug doesn't exist.
        self.log.warning("Show title does not exist.")
        sys.exit(1)

    if "children" not in title_metadata:
        return Movies([
            Movie(
                id_=self.title,
                name=title_metadata.get("title", {}).get("en"),
                year=title_metadata.get("release_year"),
                service=self.__class__,
            )
        ])

    # Episode IDs end in "-sXXeYY"; anything else (e.g. "-tlr" trailers)
    # is skipped.
    id_pattern = re.compile(r".*-s(\d+)e(\d+)$", re.IGNORECASE)
    episodes = []
    for child in title_metadata.get("children", []):
        matched = id_pattern.match(child["id"])
        if not matched:
            continue

        season_no, episode_no = (int(group) for group in matched.groups())
        episodes.append(
            Episode(
                id_=child["id"],
                title=title_metadata.get("title", {}).get("en"),
                season=season_no,
                number=episode_no,
                language="fil",
                year=title_metadata.get("release_year"),
                service=self.__class__,
            )
        )

    return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Collect video/audio tracks, subtitles, chapters and the thumbnail
    attachment for a title.

    Exits the process when no subtitles (or no English subtitles) exist,
    since the service treats them as mandatory.
    """
    # Lazily fetch the playback metadata if get_titles() didn't attach it.
    if not title.data:
        episode_data = requests.get(
            self.config["endpoints"]["api_playback"].format(js=self.get_js_value(), id=title.id)
        ).json()
        title.data = episode_data
    else:
        episode_data = title.data

    mpd_urls = episode_data.get("media", {}).get("mpds", [])

    # (url, language) pairs for each advertised caption file.
    subtitle_data = [
        (
            urljoin(self.config["endpoints"]["api_subtitle"], caption.get("id")) + ".vtt",
            caption.get("lang"),
        )
        for caption in episode_data.get("media", {}).get("captions", [])
    ]

    tracks = Tracks()
    for mpd_url in mpd_urls:
        mpd_tracks = DASH.from_url(url=mpd_url, session=self.session).to_tracks(language=title.language or "fil")
        for track in mpd_tracks:
            # Multiple MPDs can advertise the same track; keep each ID once.
            if not tracks.exists(by_id=track.id):
                # Stash the episode ID for the license request later.
                track.data["episode_id"] = episode_data.get("id")
                tracks.add(track)

    # Force Filipino language on all audio tracks regardless of what's in the MPD
    for track in tracks.audio:
        mpd_lang = title.language or "fil"
        track.language = Language.get(mpd_lang)
        track.is_original_lang = True

    # Force Filipino language on all video tracks regardless of what's in the MPD
    for track in tracks.videos:
        mpd_lang = title.language or "fil"
        track.language = Language.get(mpd_lang)
        track.is_original_lang = True

    self.log.debug(f"Processing {len(subtitle_data)} subtitle tracks")
    for subtitle_url, language in subtitle_data:
        try:
            # A separate session with browser-like headers; the CDN rejects
            # requests carrying the API session's Authorization header.
            subtitle_session = requests.Session()
            subtitle_session.headers.update(
                {
                    "User-Agent": self.config["browser"]["headers"]["user-agent"],
                    "Referer": self.config["browser"]["headers"]["Referer"],
                    "Origin": self.config["browser"]["headers"]["Origin"],
                    "sec-ch-ua": self.config["browser"]["headers"]["sec-ch-ua"],
                    "sec-ch-ua-mobile": self.config["browser"]["headers"]["sec-ch-ua-mobile"],
                    "sec-ch-ua-platform": self.config["browser"]["headers"]["sec-ch-ua-platform"],
                    "DNT": self.config["browser"]["headers"]["DNT"],
                    "sec-fetch-dest": "empty",
                    "sec-fetch-mode": "cors",
                    "sec-fetch-site": "same-site",
                }
            )

            # Check if subtitle URL is valid before creating the track
            self.log.debug(f"Checking subtitle URL: {subtitle_url}")
            response = subtitle_session.get(url=subtitle_url, stream=True, timeout=5)
            response.raise_for_status()

            subtitle_id = subtitle_url.split("/")[-1].split(".")[0]
            tracks.add(
                Subtitle(
                    id_=subtitle_id,
                    url=subtitle_url,
                    codec=Subtitle.Codec.WebVTT,
                    language=language,
                )
            )

        except requests.exceptions.HTTPError as e:
            self.log.warning(f"Subtitle URL not accessible: {subtitle_url} (HTTP {e.response.status_code})")
        except requests.exceptions.RequestException as e:
            self.log.warning(f"Request error for subtitle {subtitle_url}: {e}")
        except Exception as e:
            self.log.warning(f"Unexpected error processing subtitle {subtitle_url}: {e}")

    chapters = self.get_chapters(title)
    tracks.chapters = Chapters(chapters)

    # Try several metadata keys for the thumbnail, then fall back to the
    # first entry of the "images" list.
    thumbnail_id = episode_data.get("thumbnail") or episode_data.get("poster") or episode_data.get("thumb")
    if not thumbnail_id:
        images = episode_data.get("images", [])
        if images:
            thumbnail_data = images[0]
            thumbnail_id = thumbnail_data.get("id") or thumbnail_data.get("url").split("/")[-1].split(".")[0]

    if thumbnail_id:
        thumbnail_base_url = self.config["endpoints"]["api_thumbnail"]
        thumbnail_url = f"{thumbnail_base_url}{thumbnail_id}.jpg"
        thumbnail_response = self.session.get(thumbnail_url)
        if thumbnail_response.status_code == 200:
            thumbnail_filename = f"{title.id}_thumbnail.jpg"
            thumbnail_path = config.directories.temp / thumbnail_filename

            os.makedirs(config.directories.temp, exist_ok=True)

            with open(thumbnail_path, "wb") as f:
                f.write(thumbnail_response.content)

            # Attach the thumbnail so it's muxed alongside the download.
            thumbnail_attachment = Attachment(
                path=thumbnail_path,
                name=thumbnail_filename,
                mime_type="image/jpeg",
                description="Thumbnail",
            )

            tracks.attachments.append(thumbnail_attachment)
        else:
            self.log.warning("Thumbnail not found for title.")

    # Subtitles (specifically English ones) are mandatory for this service.
    if not tracks.subtitles:
        self.log.error("No Subtitles")
        sys.exit(1)
    if not any(sub.language.language == "en" for sub in tracks.subtitles):
        self.log.error("No English Subtitles")
        sys.exit(1)

    # Download media tracks with N_m3u8DL-RE; attachments are saved directly.
    for track in tracks:
        if track not in tracks.attachments:
            track.downloader = n_m3u8dl_re

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
    """Build a chapter list from the title's cuepoint timestamps.

    A leading "Chapter 1" at 00:00:00.000 is always emitted; each valid
    cuepoint then becomes the next numbered chapter.
    """
    # Lazily fetch playback metadata if an Episode arrived without it.
    if isinstance(title, Episode) and not title.data:
        title.data = requests.get(
            self.config["endpoints"]["api_playback"].format(js=self.get_js_value(), id=title.id)
        ).json()

    cuepoints = title.data.get("cuepoints", [])

    # De-duplicate while preserving first-seen order.
    seen = set()
    deduped = []
    for point in cuepoints:
        if point in seen:
            continue
        seen.add(point)
        deduped.append(point)

    try:
        ordered = sorted(deduped, key=lambda t: datetime.strptime(t, "%H:%M:%S.%f"))
    except ValueError as e:
        self.log.error(f"Error parsing cuepoints: {e}")
        ordered = []

    chapters = [Chapter(name="Chapter 1", timestamp="00:00:00.000")]

    for index, point in enumerate(ordered, start=2):
        try:
            parsed = datetime.strptime(point, "%H:%M:%S.%f").time()
        except ValueError:
            self.log.warning(f"Invalid cuepoint format: {point}")
        else:
            # Trim microseconds down to milliseconds for the timestamp format.
            chapters.append(Chapter(name=f"Chapter {index}", timestamp=parsed.strftime("%H:%M:%S.%f")[:-3]))

    return chapters
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, challenge: bytes, track: AnyTrack, *_, **__) -> bytes | str:
    """Return the Widevine service certificate.

    The license endpoint answers a certificate request the same way as a
    license request, so the challenge is simply forwarded to it.
    """
    # TODO: Cache the returned service cert
    return self.get_widevine_license(challenge, track)
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, track: AnyTrack, *_, **__) -> bytes:
    """POST the license challenge for this track and return the raw license."""
    item_id = track.data.get("episode_id")
    # Item ID, user token and the fixed client build tag are passed as query
    # parameters; the challenge itself is the request body.
    url = (
        self.config["endpoints"]["api_license"]
        + f"?itemID={item_id}"
        + f"&UserAuthentication={self.token}"
        + "&build=52b61137ff3af37f55e0"
    )
    return self.session.post(url=url, data=challenge).content
|
||||||
35
services/TFC/config.yaml
Normal file
35
services/TFC/config.yaml
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
endpoints:
|
||||||
|
api_login: "https://www.iwanttfc.com/api/1.0/user/auth"
|
||||||
|
api_playback: "https://absprod-static.iwanttfc.com/c/6/catalog/{js}/item/{id}.json"
|
||||||
|
api_license: "https://www.iwanttfc.com/api/1.0/license"
|
||||||
|
api_subtitle: "https://absprod-static.iwanttfc.com/c/6/captions/{videoid}"
|
||||||
|
api_search: "https://4nzqf4xnoy-dsn.algolia.net/1/indexes/*/queries?x-algolia-agent=Algolia%20for%20JavaScript%20(4.11.0)%3B%20Browser"
|
||||||
|
api_thumbnail: "https://absprod-static.iwanttfc.com/c/6/images/"
|
||||||
|
|
||||||
|
UUID: "50b0a188-d85f-4191-90f3-94f3f78a4f20"
|
||||||
|
|
||||||
|
browser:
|
||||||
|
headers:
|
||||||
|
sec-ch-ua: '"Not(A:Brand";v="99", "Microsoft Edge";v="133", "Chromium";v="133"'
|
||||||
|
Accept: "application/json, text/plain, */*"
|
||||||
|
Referer: "https://www.iwanttfc.com/"
|
||||||
|
Origin: "https://www.iwanttfc.com/"
|
||||||
|
sec-ch-ua-mobile: "?0"
|
||||||
|
user-agent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/133.0.0.0'
|
||||||
|
sec-ch-ua-platform: '"Windows"'
|
||||||
|
sec-fetch-dest: "empty"
|
||||||
|
sec-fetch-mode: "cors"
|
||||||
|
sec-fetch-site: "same-site"
|
||||||
|
DNT: "1"
|
||||||
|
|
||||||
|
search:
|
||||||
|
headers:
|
||||||
|
x-algolia-api-key: "e2f539d702376992de12e2042cdcda01"
|
||||||
|
sec-ch-ua-platform: "Windows"
|
||||||
|
User-Agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36"
|
||||||
|
x-algolia-application-id: "4NZQF4XNOY"
|
||||||
|
sec-ch-ua: 'Not A(Brand";v="8", "Chromium";v="132", "Google Chrome";v="132'
|
||||||
|
content-type: "application/x-www-form-urlencoded"
|
||||||
|
sec-ch-ua-mobile: "?0"
|
||||||
|
Referer: "https://www.iwanttfc.com/"
|
||||||
|
|
||||||
211
services/TUBI/__init__.py
Normal file
211
services/TUBI/__init__.py
Normal file
@ -0,0 +1,211 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import re
|
||||||
|
from collections.abc import Generator
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
import click
|
||||||
|
import m3u8
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.downloaders import requests
|
||||||
|
from devine.core.manifests import HLS
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Subtitle, Track, Tracks
|
||||||
|
from langcodes import Language
|
||||||
|
|
||||||
|
|
||||||
|
class TUBI(Service):
|
||||||
|
"""
|
||||||
|
Service code for TubiTV streaming service (https://tubitv.com/)
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: Cookies
|
||||||
|
Robustness:
|
||||||
|
Widevine:
|
||||||
|
L3: 720p, AAC2.0
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input can be complete title URL or just the path:
|
||||||
|
/series/300001423/gotham
|
||||||
|
/tv-shows/200024793/s01-e01-pilot
|
||||||
|
/movies/589279/the-outsiders
|
||||||
|
"""
|
||||||
|
|
||||||
|
TITLE_RE = r"^(?:https?://(?:www\.)?tubitv\.com?)?/(?P<type>movies|series|tv-shows)/(?P<id>[a-z0-9-]+)"
|
||||||
|
GEOFENCE = ("us", "ca",)
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="TUBI", short_help="https://tubitv.com/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: construct the service from the parsed CLI arguments.
    return TUBI(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title):
    # Raw title input (full URL or site path); parsed later by get_titles().
    self.title = title
    super().__init__(ctx)

    # Widevine license server URL; populated by get_tracks() per title.
    self.license = None
|
||||||
|
|
||||||
|
def authenticate(
    self,
    cookies: Optional[CookieJar] = None,
    credential: Optional[Credential] = None,
) -> None:
    """Authenticate the session; TubiTV supports cookie auth only."""
    super().authenticate(cookies, credential)

    # An empty or missing cookie jar is unusable for this service.
    if not cookies:
        raise EnvironmentError("Service requires Cookies for Authentication.")

    self.session.cookies.update(cookies)
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Query TubiTV search and yield matching titles."""
    response = self.session.get(
        self.config["endpoints"]["search"].format(query=self.title),
        params={
            "isKidsMode": "false",
            "useLinearHeader": "true",
            "isMobile": "false",
        },
    )
    response.raise_for_status()

    for result in response.json():
        # Map the single-letter content type to its URL path segment.
        if result["type"] == "s":
            label = "series"
        elif result["type"] == "v":
            label = "movies"
        else:
            label = result["type"]

        # Build the URL slug the same way the website does.
        slug = result.get("title", "").lower()
        for old, new in ((" ", "-"), (":", ""), ("(", ""), (")", ""), (".", "")):
            slug = slug.replace(old, new)

        url = f"https://tubitv.com/{label}/{result.get('id')}/{slug}"
        yield SearchResult(
            id_=url,
            title=result.get("title"),
            description=result.get("description"),
            label=label,
            url=url,
        )
|
||||||
|
|
||||||
|
def get_titles(self) -> Titles_T:
    """Parse the input URL and return the Movie/Series listing for it.

    Handles three URL kinds: "tv-shows" (a single episode, resolved via its
    parent series), "series" (all episodes) and "movies".

    Raises:
        ValueError: if the input does not match a known TubiTV URL format.
    """
    try:
        kind, content_id = (re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "id"))
    except Exception:
        raise ValueError("Could not parse ID from title - is the URL correct?")

    def make_episode(season: dict, episode: dict, data: dict) -> Episode:
        # Shared Episode construction for the "tv-shows" and "series" branches
        # (previously duplicated, with divergent language fallbacks).
        # Episode titles are usually "sXX-eYY - Name"; fall back to the whole
        # title when no dash is present instead of raising IndexError.
        parts = episode["title"].split("-")
        name = parts[1] if len(parts) > 1 else parts[0]
        return Episode(
            id_=episode["id"],
            service=self.__class__,
            title=data["title"],
            season=int(season["id"]),
            number=int(episode["episode_number"]),
            name=name,
            year=data["year"],
            # "lang" may be present but empty/None, so `or "en"` (not a .get
            # default) is required for the English fallback to always apply.
            language=Language.find(episode.get("lang") or "en").to_alpha3(),
            data=episode,
        )

    if kind == "tv-shows":
        # Single-episode input: resolve the parent series first, then keep
        # only the requested episode.
        content = self.session.get(self.config["endpoints"]["content"].format(content_id=content_id))
        content.raise_for_status()
        series_id = "0" + content.json().get("series_id")  # series IDs are 0-prefixed
        data = self.session.get(self.config["endpoints"]["content"].format(content_id=series_id)).json()

        return Series(
            [
                make_episode(season, episode, data)
                for season in data["children"]
                for episode in season["children"]
                if episode["id"] == content_id
            ]
        )

    if kind == "series":
        r = self.session.get(self.config["endpoints"]["content"].format(content_id=content_id))
        r.raise_for_status()
        data = r.json()

        return Series(
            [
                make_episode(season, episode, data)
                for season in data["children"]
                for episode in season["children"]
            ]
        )

    if kind == "movies":
        r = self.session.get(self.config["endpoints"]["content"].format(content_id=content_id))
        r.raise_for_status()
        data = r.json()
        return Movies(
            [
                Movie(
                    id_=data["id"],
                    service=self.__class__,
                    year=data["year"],
                    name=data["title"],
                    language=Language.find(data.get("lang") or "en").to_alpha3(),
                    data=data,
                )
            ]
        )
|
||||||
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
    """Resolve the HLS manifest for *title* and return its tracks.

    Also stores the manifest and Widevine license-server URLs on the
    instance for later use by get_widevine_license().

    Raises:
        ValueError: if the title carries no video resources (missing title
            or wrong geolocation).
    """
    if not title.data.get("video_resources"):
        raise ValueError("No video resources found. Title is either missing or geolocation is incorrect.")

    # Only the first video resource is used; presumably the preferred
    # (Widevine-capable) one — TODO confirm against the API response.
    self.manifest = title.data["video_resources"][0]["manifest"]["url"]
    self.license = title.data["video_resources"][0].get("license_server", {}).get("url")

    tracks = HLS.from_url(url=self.manifest, session=self.session).to_tracks(language=title.language)
    for track in tracks:
        # Re-point each track at the first media segment of its variant
        # playlist and download it as a plain URL instead of via HLS.
        master = m3u8.loads(self.session.get(track.url).text, uri=track.url)
        track.url = urljoin(master.base_uri, master.segments[0].uri)
        track.descriptor = Track.Descriptor.URL

    # Only the first subtitle entry is used; codec is inferred from the
    # URL's 3-character file extension.
    if title.data.get("subtitles"):
        tracks.add(
            Subtitle(
                id_=hashlib.md5(title.data["subtitles"][0]["url"].encode()).hexdigest()[0:6],
                url=title.data["subtitles"][0]["url"],
                codec=Subtitle.Codec.from_mime(title.data["subtitles"][0]["url"][-3:]),
                language=title.data["subtitles"][0].get("lang_alpha3", title.language),
                downloader=requests,
                is_original_lang=True,
                forced=False,
                sdh=False,
            )
        )
    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Title_T) -> list[Chapter]:
    # TubiTV exposes no chapter/cuepoint data.
    return []
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
    # No service certificate is used for this service.
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the challenge to the license server; None when content is clear."""
    if not self.license:
        return None

    response = self.session.post(url=self.license, data=challenge)
    if response.status_code != 200:
        # Surface the server's error body directly.
        raise ConnectionError(response.text)

    return response.content
|
||||||
5
services/TUBI/config.yaml
Normal file
5
services/TUBI/config.yaml
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
endpoints:
|
||||||
|
content: https://tubitv.com/oz/videos/{content_id}/content?video_resources=hlsv6_widevine_nonclearlead&video_resources=hlsv6
|
||||||
|
search: https://tubitv.com/oz/search/{query}
|
||||||
|
|
||||||
|
|
||||||
304
services/TVNZ/__init__.py
Normal file
304
services/TVNZ/__init__.py
Normal file
@ -0,0 +1,304 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from collections.abc import Generator
|
||||||
|
from datetime import timedelta
|
||||||
|
from http.cookiejar import MozillaCookieJar
|
||||||
|
from typing import Any, Optional, Union
|
||||||
|
from urllib.parse import urljoin, urlparse
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests.dash import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapters, Tracks
|
||||||
|
from lxml import etree
|
||||||
|
from pywidevine.cdm import Cdm as WidevineCdm
|
||||||
|
from requests import Request
|
||||||
|
|
||||||
|
|
||||||
|
class TVNZ(Service):
|
||||||
|
"""
|
||||||
|
\b
|
||||||
|
Service code for TVNZ streaming service (https://www.tvnz.co.nz).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: Credentials
|
||||||
|
Robustness:
|
||||||
|
L3: 1080p, AAC2.0
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Input can be complete URL or path:
|
||||||
|
SHOW: /shows/tulsa-king
|
||||||
|
EPISODE: /shows/tulsa-king/episodes/s1-e1
|
||||||
|
MOVIE: /shows/the-revenant
|
||||||
|
SPORT: /sport/tennis/wta-tour/guadalajara-open-final
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
GEOFENCE = ("nz",)
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="TVNZ", short_help="https://www.tvnz.co.nz", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> TVNZ:
    # Click entry point: construct the service from the parsed CLI arguments.
    return TVNZ(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title: str):
    # Raw title input (full URL or site path); parsed later by get_titles().
    self.title = title
    super().__init__(ctx)

    # All API calls identify as the Android TV client (see config headers).
    self.session.headers.update(self.config["headers"])
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Search the TVNZ catalogue and yield matching results."""
    data = self._request(
        "GET",
        "/api/v1/android/play/search",
        params={
            "q": self.title.strip(),
            "includeTypes": "all",
        },
    )

    for entry in data["results"]:
        page_url = entry["page"].get("url")
        yield SearchResult(
            id_=page_url,
            title=entry.get("title"),
            description=entry.get("synopsis"),
            label=entry.get("type"),
            url="https://www.tvnz.co.nz" + page_url,
        )
|
||||||
|
|
||||||
|
def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Log in with credentials, caching tokens per credential.

    Raises:
        EnvironmentError: if no credential was provided.
        ValueError: if the login response carries no access token.
    """
    super().authenticate(cookies, credential)
    if not credential:
        raise EnvironmentError("Service requires Credentials for Authentication.")

    cache = self.cache.get(f"tokens_{credential.sha1}")

    if cache and not cache.expired:
        self.log.info(" + Using cached Tokens...")
        tokens = cache.data
    else:
        self.log.info(" + Logging in...")
        payload = {"email": credential.username, "password": credential.password, "keepMeLoggedIn": True}

        response = self.session.post(
            self.config["endpoints"]["base_api"] + "/api/v1/androidtv/consumer/login", json=payload
        )
        response.raise_for_status()
        # The access token is returned in the "aat" response header, not the body.
        if not response.headers.get("aat"):
            raise ValueError("Failed to authenticate: " + response.text)

        tokens = {
            "access_token": response.headers.get("aat"),
            "aft_token": response.headers.get("aft"),  # ?
        }

        # NOTE(review): "aat_expires_in" is a header string — presumably the
        # cache accepts it as a TTL; confirm expected type of `expiration`.
        cache.set(tokens, expiration=response.headers.get("aat_expires_in"))

    self.session.headers.update({"Authorization": "Bearer {}".format(tokens["access_token"])})
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """Resolve the input path to its page and build the title listing.

    Single-video pages are returned as a one-episode Series; show pages
    are walked season by season (with pagination) and returned as a
    Series or Movies depending on the page module's label.
    """
    try:
        path = urlparse(self.title).path
    except Exception as e:
        raise ValueError("Could not parse ID from title: {}".format(e))

    page = self._request("GET", "/api/v4/androidtv/play/page/{}".format(path))

    # A page with a direct "video" layout is a single episode/sport video.
    if page["layout"].get("video"):
        title = page.get("title", "").replace("Episodes", "")
        video = self._request("GET", page["layout"]["video"].get("href"))
        episodes = self._episode(video, title)
        return Series(episodes)

    else:
        # Show page: the first module of the main slot lists the seasons.
        module = page["layout"]["slots"]["main"]["modules"][0]
        label = module.get("label", "")
        lists = module.get("lists")
        title = page.get("title", "").replace(label, "")

        seasons = [x.get("href") for x in lists]

        # Collect every embedded item across all seasons, following
        # "nextPage" pagination links as they appear.
        episodes = []
        for season in seasons:
            data = self._request("GET", season)
            episodes.extend([x for x in data["_embedded"].values()])

            while data.get("nextPage"):
                data = self._request("GET", data["nextPage"])
                episodes.extend([x for x in data["_embedded"].values()])

        if label in ("Episodes", "Stream"):
            episodes = self._show(episodes, title)
            return Series(episodes)

        elif label in ("Movie", "Movies"):
            movie = self._movie(episodes, title)
            return Movies(movie)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Fetch the DASH manifest for *title* and return its tracks.

    Two playback back-ends exist: TVNZ's own playback API (non-brightcove
    sources, which may carry a dedicated DRM token) and Brightcove's edge
    API. Stores the license URL and DRM token on the instance for
    get_widevine_license().
    """
    # Metadata lives under different keys depending on content type.
    metadata = title.data.get("publisherMetadata") or title.data.get("media")
    if not metadata:
        self.log.error("Unable to find metadata for this episode")
        return

    source = metadata.get("type") or metadata.get("source")
    video_id = metadata.get("brightcoveVideoId") or metadata.get("id")
    account_id = metadata.get("brightcoveAccountId") or metadata.get("accountId")
    playback = title.data.get("playbackHref", "")

    self.drm_token = None
    if source != "brightcove":
        # Native TVNZ playback: license URL and DRM token come from the
        # playback response's encryption block.
        data = self._request("GET", playback)
        self.license = (
            data["encryption"]["licenseServers"]["widevine"]
            if data["encryption"].get("drmEnabled")
            else None
        )
        self.drm_token = data["encryption"].get("drmToken")
        source_manifest = data["streaming"]["dash"].get("url")

    else:
        # Brightcove playback: pick the first source that offers Widevine.
        data = self._request(
            "GET", self.config["endpoints"]["brightcove"].format(account_id, video_id),
            headers={"BCOV-POLICY": self.config["policy"]},
        )

        self.license = next((
            x["key_systems"]["com.widevine.alpha"]["license_url"]
            for x in data["sources"]
            if x.get("key_systems").get("com.widevine.alpha")),
            None,
        )
        source_manifest = next((
            x["src"] for x in data["sources"]
            if x.get("key_systems").get("com.widevine.alpha")),
            None,
        )

    # Trim the advertised duration to avoid a 404 on the final segment.
    manifest = self.trim_duration(source_manifest)
    tracks = DASH.from_text(manifest, source_manifest).to_tracks(title.language)

    # Flag audio-description/alternate audio renditions as descriptive.
    for track in tracks.audio:
        role = track.data["dash"]["representation"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    # TVNZ exposes no chapter/cuepoint data.
    return Chapters()
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
    # Use pywidevine's bundled common privacy certificate.
    return WidevineCdm.common_privacy_cert
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> str:
    """POST the challenge to the license server; None when content is clear."""
    if not self.license:
        return None

    # Native TVNZ sources authenticate with the dedicated DRM token from the
    # playback response; otherwise the session's own headers are used.
    if self.drm_token:
        headers = {"Authorization": f"Bearer {self.drm_token}"}
    else:
        headers = self.session.headers

    response = self.session.post(self.license, headers=headers, data=challenge)
    response.raise_for_status()

    return response.content
|
||||||
|
|
||||||
|
# Service specific
|
||||||
|
|
||||||
|
def _show(self, episodes: list, title: str) -> Episode:
    """Map raw episode dicts from a show page onto devine Episode objects."""
    results = []
    for ep in episodes:
        # Season/episode numbers may be absent; default both to 0.
        results.append(
            Episode(
                id_=ep.get("videoId"),
                service=self.__class__,
                title=title,
                season=int(ep.get("seasonNumber")) if ep.get("seasonNumber") else 0,
                number=int(ep.get("episodeNumber")) if ep.get("episodeNumber") else 0,
                name=ep.get("title"),
                language="en",
                data=ep,
            )
        )
    return results
|
||||||
|
|
||||||
|
def _movie(self, movies: list, title: str) -> Movie:
    """Map raw movie dicts onto devine Movie objects."""
    results = []
    for item in movies:
        results.append(
            Movie(
                id_=item.get("videoId"),
                service=self.__class__,
                name=title,
                year=None,  # release year is not exposed by the API here
                language="en",
                data=item,
            )
        )
    return results
|
||||||
|
|
||||||
|
def _episode(self, video: dict, title: str) -> Episode:
    """Wrap a single video dict as a one-element Episode list.

    Sport videos take their title from the embedded competition entry and
    append the match phase to the episode name.
    """
    kind = video.get("type")
    name = video.get("title")

    if kind == "sportVideo" and video.get("_embedded"):
        _type = next((x for x in video["_embedded"].values() if x.get("type") == "competition"), None)
        title = _type.get("title") if _type else title
        name = video.get("title", "") + " " + video.get("phase", "")

    return [
        Episode(
            id_=video.get("videoId"),
            service=self.__class__,
            title=title,
            # Season/episode numbers may be absent; default both to 0.
            season=int(video.get("seasonNumber")) if video.get("seasonNumber") else 0,
            number=int(video.get("episodeNumber")) if video.get("episodeNumber") else 0,
            name=name,
            language="en",
            data=video,
        )
    ]
|
||||||
|
|
||||||
|
def _request(
    self,
    method: str,
    api: str,
    params: dict = None,
    headers: dict = None,
    payload: dict = None,
) -> Any:
    """Send an API request and return the decoded JSON body.

    Args:
        method: HTTP method ("GET", "POST", ...).
        api: absolute URL, or a path relative to the base API endpoint.
        params: optional query parameters.
        headers: optional extra headers (persisted on the session).
        payload: optional JSON request body.

    Raises:
        ConnectionError: if the response body is not valid JSON, or the
            API returned an error ("message") payload.
    """
    url = urljoin(self.config["endpoints"]["base_api"], api)
    if headers:
        self.session.headers.update(headers)

    prep = self.session.prepare_request(Request(method, url, params=params, json=payload))
    response = self.session.send(prep)

    # Keep the try-block narrow: only JSON decoding belongs in it.
    # Previously the API-error ConnectionError raised inside the try was
    # caught by its own broad `except` and replaced with a generic message,
    # and list-shaped JSON bodies crashed `.get()` and were misreported.
    try:
        data = json.loads(response.content)
    except ValueError as e:
        raise ConnectionError(
            "Request failed: {} - {}".format(response.status_code, response.text)
        ) from e

    if isinstance(data, dict) and data.get("message"):
        raise ConnectionError(f"{response.status_code} - {data.get('message')}")

    return data
|
||||||
|
|
||||||
|
def trim_duration(self, source_manifest: str) -> str:
    """
    The last segment on all tracks return a 404 for some reason, causing a failed download.
    So we trim the duration by exactly one segment to account for that.

    TODO: Calculate the segment duration instead of assuming length.
    """
    # Fetch and parse the MPD through the authenticated session.
    manifest = DASH.from_url(source_manifest, self.session).manifest
    period_duration = manifest.get("mediaPresentationDuration")
    period_duration = DASH.pt_to_sec(period_duration)

    # Assumes 6-second segments — TODO confirm against the manifest.
    hours, minutes, seconds = str(timedelta(seconds=period_duration - 6)).split(":")
    new_duration = f"PT{hours}H{minutes}M{seconds}S"
    manifest.set("mediaPresentationDuration", new_duration)

    return etree.tostring(manifest, encoding="unicode")
|
||||||
9
services/TVNZ/config.yaml
Normal file
9
services/TVNZ/config.yaml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
headers:
|
||||||
|
User-Agent: "AndroidTV/!/!"
|
||||||
|
x-tvnz-api-client-id: "androidtv/!.!.!"
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
base_api: "https://apis-public-prod.tech.tvnz.co.nz"
|
||||||
|
brightcove: "https://edge.api.brightcove.com/playback/v1/accounts/{}/videos/{}"
|
||||||
|
|
||||||
|
policy: "BCpkADawqM0IurzupiJKMb49WkxM__ngDMJ3GOQBhN2ri2Ci_lHwDWIpf4sLFc8bANMc-AVGfGR8GJNgxGqXsbjP1gHsK2Fpkoj6BSpwjrKBnv1D5l5iGPvVYCo"
|
||||||
193
services/UKTV/__init__.py
Normal file
193
services/UKTV/__init__.py
Normal file
@ -0,0 +1,193 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from collections.abc import Generator
|
||||||
|
from datetime import timedelta
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
|
import click
|
||||||
|
from click import Context
|
||||||
|
from devine.core.manifests.dash import DASH
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapter, Chapters, Tracks
|
||||||
|
from lxml import etree
|
||||||
|
|
||||||
|
|
||||||
|
class UKTV(Service):
|
||||||
|
"""
|
||||||
|
Service code for 'U' (formerly UKTV Play) streaming service (https://u.co.uk/).
|
||||||
|
|
||||||
|
\b
|
||||||
|
Author: stabbedbybrick
|
||||||
|
Authorization: None
|
||||||
|
Robustness:
|
||||||
|
L3: 1080p
|
||||||
|
|
||||||
|
\b
|
||||||
|
Tips:
|
||||||
|
- Use complete title URL as input:
|
||||||
|
SERIES: https://u.co.uk/shows/love-me/watch-online
|
||||||
|
EPISODE: https://u.co.uk/shows/love-me/series-1/episode-1/6355269425112
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
GEOFENCE = ("gb",)
|
||||||
|
ALIASES = ("uktvplay", "u",)
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="UKTV", short_help="https://u.co.uk/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> UKTV:
    # Click entry point: construct the service from the parsed CLI arguments.
    return UKTV(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title: str):
    # Raw title input (full URL or slug); parsed later by parse_title().
    self.title = title
    super().__init__(ctx)

    # The VOD schedule API expects the Android okhttp client UA.
    self.session.headers.update({"user-agent": "okhttp/4.7.2"})
    self.base = self.config["endpoints"]["base"]
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Search the U catalogue and yield matching shows."""
    response = self.session.get(self.base + f"search/?q={self.title}")
    response.raise_for_status()

    for entry in response.json():
        # Results always point at the show's watch-online page.
        url = "https://u.co.uk/shows/{}/watch-online".format(entry.get("slug"))

        yield SearchResult(
            id_=url,
            title=entry.get("name"),
            description=entry.get("synopsis"),
            label=entry.get("type"),
            url=url,
        )
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """Fetch every episode for the brand, filtered to one video if an
    episode URL (carrying a video ID) was given.

    The original implementation duplicated the entire Episode-building
    comprehension for the filtered and unfiltered cases; they differed only
    by the video-ID filter, so they are collapsed into one.
    """
    slug, video = self.parse_title(self.title)

    r = self.session.get(self.base + f"brand/?slug={slug}")
    r.raise_for_status()
    data = r.json()

    # Resolve each series (season) of the brand to its episode listing.
    series = [series["id"] for series in data["series"]]
    seasons = [self.session.get(self.base + f"series/?id={i}").json() for i in series]

    episodes = [
        Episode(
            id_=episode.get("video_id"),
            service=self.__class__,
            title=episode.get("brand_name"),
            season=int(episode.get("series_number", 0)),
            number=int(episode.get("episode_number", 0)),
            name=episode.get("name"),
            language="en",
            data=episode,
        )
        for season in seasons
        for episode in season["episodes"]
        # Keep everything unless a specific video ID was requested.
        if not video or int(episode.get("video_id")) == int(video)
    ]

    return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Fetch the Brightcove DASH manifest for *title* and return its tracks.

    Stores the Widevine license URL on the instance for
    get_widevine_license().

    Raises:
        ValueError: if no Widevine-capable source is found.
    """
    r = self.session.get(
        self.config["endpoints"]["playback"].format(id=title.id),
        headers=self.config["headers"],
    )
    r.raise_for_status()
    data = r.json()

    # Pick the first source that offers a Widevine key system.
    self.license = next((
        x["key_systems"]["com.widevine.alpha"]["license_url"]
        for x in data["sources"]
        if x.get("key_systems").get("com.widevine.alpha")),
        None,
    )
    source_manifest = next((
        x["src"] for x in data["sources"]
        if x.get("key_systems").get("com.widevine.alpha")),
        None,
    )
    if not self.license or not source_manifest:
        raise ValueError("Failed to get license or manifest")

    # Trim the advertised duration to avoid a 404 on the final segment.
    manifest = self.trim_duration(source_manifest)
    tracks = DASH.from_text(manifest, source_manifest).to_tracks(title.language)

    # Flag audio-description/alternate audio renditions as descriptive.
    for track in tracks.audio:
        role = track.data["dash"]["representation"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Return a single "Credits" chapter when the API provides its cuepoint."""
    credits_point = title.data.get("credits_cuepoint")
    if not credits_point:
        return Chapters([])

    return Chapters([Chapter(name="Credits", timestamp=credits_point)])
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
    # No service certificate is used for this service.
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the challenge to the Brightcove license server."""
    response = self.session.post(url=self.license, data=challenge)
    if response.status_code != 200:
        # Surface the server's error body directly.
        raise ConnectionError(response.text)

    return response.content
|
||||||
|
|
||||||
|
# Service specific functions
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def parse_title(title: str) -> tuple[str, str]:
|
||||||
|
title_re = (
|
||||||
|
r"^(?:https?://(?:www\.)?u\.co.uk/shows/)?"
|
||||||
|
r"(?P<slug>[a-z0-9-]+)(?:/[a-z0-9-]+/[a-z0-9-]+/(?P<vid>[0-9-]+))?"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
slug, video = (re.match(title_re, title).group(i) for i in ("slug", "vid"))
|
||||||
|
except Exception:
|
||||||
|
raise ValueError("Could not parse ID from title - is the URL correct?")
|
||||||
|
|
||||||
|
return slug, video
|
||||||
|
|
||||||
|
@staticmethod
def trim_duration(source_manifest: str) -> str:
    """
    The last segment on all tracks return a 404 for some reason, causing a failed download.
    So we trim the duration by exactly one segment to account for that.

    TODO: Calculate the segment duration instead of assuming length.
    """
    # NOTE(review): unlike the TVNZ variant, no session is passed to
    # DASH.from_url here — confirm the manifest URL needs no auth headers.
    manifest = DASH.from_url(source_manifest).manifest
    period_duration = manifest.get("mediaPresentationDuration")
    period_duration = DASH.pt_to_sec(period_duration)

    # Assumes 6-second segments — TODO confirm against the manifest.
    hours, minutes, seconds = str(timedelta(seconds=period_duration - 6)).split(":")
    new_duration = f"PT{hours}H{minutes}M{seconds}S"
    manifest.set("mediaPresentationDuration", new_duration)

    return etree.tostring(manifest, encoding="unicode")
|
||||||
9
services/UKTV/config.yaml
Normal file
9
services/UKTV/config.yaml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
headers:
|
||||||
|
BCOV-POLICY: BCpkADawqM2ZEz-kf0i2xEP9VuhJF_DB5boH7YAeSx5EHDSNFFl4QUoHZ3bKLQ9yWboSOBNyvZKm4HiZrqMNRxXm-laTAnmls1QOL7_kUM3Eij4KjQMz0epMs3WIedg64fnRxQTX6XubGE9p
|
||||||
|
User-Agent: Dalvik/2.1.0 (Linux; U; Android 12; SM-A226B Build/SP1A.210812.016)
|
||||||
|
Host: edge.api.brightcove.com
|
||||||
|
Connection: keep-alive
|
||||||
|
|
||||||
|
endpoints:
|
||||||
|
base: https://vschedules.uktv.co.uk/vod/
|
||||||
|
playback: https://edge.api.brightcove.com/playback/v1/accounts/1242911124001/videos/{id}
|
||||||
339
services/UNEXT/__init__.py
Normal file
339
services/UNEXT/__init__.py
Normal file
@ -0,0 +1,339 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import click
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.utils.collections import as_list
|
||||||
|
|
||||||
|
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.tracks import Chapter, Tracks
|
||||||
|
|
||||||
|
|
||||||
|
class UNEXT(Service):
    """
    Service code for the Unext streaming service (https://video.unext.jp/).

    \b
    Authorization: Cookies

    \b
    Note: - No JP proxies needed.
    """

    ALIASES = ["UNXT", "UNEXT", "U-NEXT", "unxt", "unext", "u-next"]
    GEOFENCE = []
    # Matches full title/episode URLs; "category", "id" and "epsid" groups are optional.
    TITLE_RE = [
        r"^https?://(?:www\.)?video\.unext\.jp/(?P<category>title/|\?td=|episode/)?(?P<id>[a-zA-Z0-9-]+)(?:/)?(?P<epsid>E[\d\w]+)?"
    ]

    @staticmethod
    @click.command(name="UNEXT", short_help="https://video.unext.jp/")
    @click.argument("title", type=str)
    @click.option("-l", "--lang", default="ja", help="Specify language")
    @click.option("-ft", "--force-title", required=False, help="Force using this title as titlename.")
    @click.option("-nt", "--notitle", is_flag=True, default=False, help="Dont grab episode title.")
    @click.pass_context
    def cli(ctx: click.Context, **kwargs: Any) -> "UNEXT":
        # Click entry point; forwards all CLI options into the constructor.
        return UNEXT(ctx, **kwargs)

    def __init__(self, ctx, title: str, lang: str, force_title: str, notitle: bool):
        super().__init__(ctx)
        # Parse the URL/slug; parse_title also sets self.title as a side effect.
        m = self.parse_title(ctx, title)
        if m and m.get("category"):
            self.category = m.get("category")
        else:
            self.category = "regular"
        if m and m.get("epsid"):
            self.epsid = m.get("epsid")
        else:
            self.epsid = None
        self.lang = lang
        self.forcetitle = force_title
        self.notitle = notitle
        self.profile = ctx.obj.profile

        # Populated later by get_tracks; drives chapter generation.
        self.endroll = None

    def get_titles(self):
        """Fetch the title stage + episode list via the GraphQL API and build Movie/Episode titles."""
        # First API call for title stage data
        resp = self.session.post(
            url=self.config["endpoints"]["cc"],
            headers={"Content-Type": "application/json"},
            json={
                "operationName": "cosmo_getVideoTitle",
                "variables": {"code": self.title},
                "query": self.config["queries"]["vidtitle"].replace(r"\n", "\n"),
            },
        )
        try:
            stage_data = resp.json()["data"]["webfront_title_stage"]
        except Exception:
            if "Token expired" in resp.text:
                return None
            self.log.error(f" - Got an error!, resp: {resp.text}")
            return None

        if stage_data.get("hasSubtitle"):
            self.log.warn(" - This title probably has hardcoded subs if no subs available in the manifest")

        # Second API call for episodes
        res2 = self.session.post(
            url=self.config["endpoints"]["cc"],
            headers={"Content-Type": "application/json"},
            json={
                "operationName": "cosmo_getTitle",
                "variables": {
                    "id": self.title,
                    "episodeCode": stage_data["currentEpisode"]["id"],
                    "episodePageSize": 1000,
                    "episodePage": 1,
                },
                "query": self.config["queries"]["title"].replace(r"\n", "\n"),
            },
        )
        try:
            data2 = res2.json()["data"]
            self.log.debug(data2)
        except Exception:
            self.log.error(f" - Got an error!, resp: {res2.text}")
            return None

        title_stage = data2["webfront_title_stage"]
        episodes = data2["webfront_title_titleEpisodes"]["episodes"]
        title_ = stage_data["titleName"].strip()
        publish_style = title_stage.get("publishStyleCode", "")
        titles = []

        if publish_style == "VOD_SINGLE":
            # Single-video title: treated as a movie. "予告編" means "trailer",
            # "最終話" means "final episode".
            ep = episodes[0]
            if ep.get("displayNo", "") == "予告編":
                eps_num = 0
            else:
                num_match = re.search(r"\d+", ep.get("displayNo", "")) or re.search(r"\d+", ep.get("episodeName", ""))
                eps_num = int(num_match.group(0)) if num_match else 0
            if ep.get("displayNo", "") == "最終話":
                eps_num += 1

            eps_name = (
                None
                if self.notitle
                else ("Trailer" if ep.get("displayNo", "") == "予告編" else ep.get("episodeName", "").strip())
            )
            # NOTE(review): Movies() is being constructed with per-episode kwargs
            # here — presumably this should be a Movie wrapped later; verify
            # against devine's titles API.
            titles.append(
                Movies(
                    id_=ep["id"],
                    title=self.forcetitle if self.forcetitle else title_,
                    year=stage_data.get("productionYear", 0),
                    number=eps_num,
                    name=eps_name,
                    language=self.lang,
                    service=self.__class__,
                    data=ep,
                )
            )
        elif publish_style == "VOD_MULTI":
            episode_objs = []
            last_eps = []
            for ep in episodes:
                # Compute eps_num from displayNo or episodeName
                if ep.get("displayNo", "") == "予告編":
                    eps_num = 0
                else:
                    num_match = re.search(r"\d+", ep.get("displayNo", "")) or re.search(
                        r"\d+", ep.get("episodeName", "")
                    )
                    eps_num = int(num_match.group(0)) if num_match else 0
                last_eps.append(eps_num)
                # The final episode often lacks a number; place it after the highest seen.
                if ep.get("displayNo", "") == "最終話":
                    eps_num = max(last_eps) + 1

                # Extract and clean episode name
                ep_name = ep.get("episodeName", "")
                match = re.search(r"-(.*?)-", ep_name)
                ep_class = match.group(1) if match else ""
                clean_name = re.sub(r"\s*-\s*.*?\s*-\s*", "", ep_name)

                # Pad very short IDs with a fixed suffix; stripped again in get_tracks-like code.
                episode_id = ep["id"]
                if len(episode_id) < 4:
                    episode_id += "5349"

                episode_obj = Episode(
                    id_=episode_id,
                    title=title_,
                    season="1",
                    number=eps_num,
                    name=clean_name,
                    language=self.lang,
                    service=self.__class__,
                    data=ep,
                )
                episode_obj.episode_class = ep_class

                episode_objs.append(episode_obj)
            titles.append(Series(episode_objs))
        else:
            raise NotImplementedError("The current title category is not implemented yet!")

        return titles

    def get_tracks(self, title: Title_T) -> Tracks:
        """Request a playlist URL for the title, load the DASH manifest and return its tracks."""
        tracks = Tracks()
        res = self.session.post(
            url=self.config["endpoints"]["cc"],
            headers={"Content-Type": "application/json"},
            json={
                "operationName": "cosmo_getPlaylistUrl",
                "variables": {
                    "code": title.id,
                    "playMode": "caption",
                    "bitrateLow": 192,
                    "bitrateHigh": None,
                    "validationOnly": False,
                },
                "query": self.config["queries"]["playlist"].replace(r"\n", "\n"),
            },
        )
        try:
            data = res.json()["data"]["webfront_playlistUrl"]
            self.log.debug(data)
        except Exception:
            # NOTE(review): execution falls through with `data` undefined here,
            # so the next line would raise NameError — verify intended behavior.
            self.log.error(f" - Got an error!, resp: {res.text}")

        if data["resultStatus"] == 476:
            self.log.error(" - This title need a rent! No playback returned.")
        elif data["resultStatus"] == 462:
            self.log.error(
                " - Playing on another device. Playback on multiple devices at the same time is not possible. (462)"
            )

        # play_token is required by both the manifest URL and the license proxy.
        self.play_token = data["playToken"]
        manifest_url = next(x["playlistUrl"] for x in data["urlInfo"][0]["movieProfile"] if x["type"] == "DASH")
        manifest_url += f"&play_token={self.play_token}"
        self.log.debug(manifest_url)
        self.log.info(f"MPD: {manifest_url}")

        # Remember where the end credits start so get_chapters can mark them.
        self.endroll = data["urlInfo"][0].get("endrollStartPosition")

        tracks.add(
            DASH.from_url(url=manifest_url, session=self.session, source=self.ALIASES[0]).to_tracks(title.language)
        )

        # Strip the play_token query param from segment URLs; it is only valid
        # for the manifest request.
        for track in tracks:
            track.needs_proxy = False
            if isinstance(track.url, list):
                track.url = list(map(lambda x: re.sub(r"(\?|\&)play_token=.*", "", x), track.url))
            else:
                track.url = re.sub(r"(\?|\&)play_token=.*", "", track.url)
        return tracks

    def get_chapters(self, title: Titles_T):
        """Return Start/End chapters when an endroll position is known, else no chapters."""
        if self.endroll:

            def to_hms(s):
                # Seconds -> zero-padded "HH:MM:SS".
                s = int(s)
                m, s = divmod(s, 60)
                h, m = divmod(m, 60)
                return "{:02}:{:02}:{:02}".format(int(h), int(m), int(s))

            chaps = [
                Chapter(
                    number=1,
                    title="Start",
                    timecode="00:00:00.000",
                )
            ]

            chaps.append(Chapter(number=2, title="The End", timecode=to_hms(self.endroll)))

            return chaps
        else:
            return []

    def get_widevine_service_certificate(self, **_: Any) -> None:
        # This service does not provide a service certificate.
        return None

    def get_widevine_license(self, *, challenge: bytes) -> None:
        # The license proxy authenticates via the play_token captured in get_tracks.
        return self.session.post(
            url=self.config["endpoints"]["license"], params={"play_token": self.play_token}, data=challenge
        ).content

    # Service specific functions

    def authenticate(
        self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None
    ) -> Optional[str]:
        """Set Chromecast-style headers and load the user's cookies into the session."""
        self.session.headers.update(
            {
                "User-Agent": "ATVE/6.2.0 Android/10 build/6A226 maker/Google model/Chromecast FW/QTS2.200918.0337115981",
                "X-Forwarded-For": "103.140.112.106",
                "Origin": "https://video.unext.jp",
                "Referer": "https://video.unext.jp/",
            }
        )
        # NOTE(review): raises if cookies is None — callers appear to always pass them.
        self.session.cookies.update(cookies)

    def get_token(self) -> str:
        """Return a cached "_at" auth token, refreshing and re-caching it when expired or absent."""
        token_cache_path = Path(config.directories.cache / self.__class__.__name__ / f"token.json")
        if token_cache_path.is_file():
            token = json.loads(token_cache_path.read_text(encoding="utf-8"))
            if token.get("exp", 0) > int(time.time()):
                # not expired, lets use
                self.log.info(" + Using cached token...")
                return token["_at"]
            # expired, refresh
            self.log.info(" + Refreshing and using cached auth tokens...")
            return self.save_token(self.refresh(), token_cache_path)
        self.log.info(" + Caching new token...")
        return self.save_token(self.refresh(), token_cache_path)

    @staticmethod
    def save_token(token: str, to: Path) -> str:
        """Persist the token with a conservative 3-hour expiry and return it unchanged."""
        data = {}
        data["_at"] = token
        data["exp"] = int(time.time()) + 10800  # add 3 hours from now to be safe
        # lets cache the token
        to.parent.mkdir(parents=True, exist_ok=True)
        to.write_text(json.dumps(data), encoding="utf8")
        # finally return the token
        return token

    def refresh(self) -> str:
        """Hit the homepage and scrape a fresh "_at" token out of the Set-Cookie header."""
        res = self.session.get(
            url=self.config["endpoints"]["home"],
            headers={
                "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,"
                "image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
            },
        )
        _at = re.search(r"(?<=_at=)([\w\d\.\-]+)", res.headers.get("Set-Cookie"))
        if _at:
            _at = _at.group(1)
        else:
            # NOTE(review): on failure this logs but still returns the Match/None
            # value — callers should expect a possibly-falsy result.
            self.log.error(f" - Failed to refresh token: {res.text}")
        return _at

    def parse_title(self, ctx, title):
        """Match the input against TITLE_RE; set self.title and return the regex groupdict (or {})."""
        title = title or ctx.parent.params.get("title")
        if not title:
            self.log.error(" - No title ID specified")
        if not getattr(self, "TITLE_RE"):
            self.title = title
            return {}
        for regex in as_list(self.TITLE_RE):
            m = re.search(regex, title)
            if m:
                self.title = m.group("id")
                return m.groupdict()
        # No pattern matched; fall back to using the raw input as the ID.
        self.log.warning(f" - Unable to parse title ID {title!r}, using as-is")
        self.title = title
|
||||||
102
services/UNEXT/config.yaml
Normal file
102
services/UNEXT/config.yaml
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
endpoints:
|
||||||
|
cc: https://cc.unext.jp/
|
||||||
|
home: https://video.unext.jp/
|
||||||
|
license: https://wvproxy.unext.jp/proxy
|
||||||
|
queries:
|
||||||
|
epstitle: "query cosmo_getEpisodeDetail($code: ID!, $episodeCode: ID!)
|
||||||
|
{\\n webfront_title_stage(id: $code)
|
||||||
|
{\\n id\\n titleName\\n productionYear\\n missingAlertText\\n \
|
||||||
|
nextUpdateDateTime\\n copyright\\n mainGenreId\\n episode(id:
|
||||||
|
$episodeCode)
|
||||||
|
{\\n id\\n episodeName\\n purchaseEpisodeLimitday\\n th\
|
||||||
|
umbnail
|
||||||
|
{\\n standard\\n __typename\\n }\\n duration\\n \
|
||||||
|
displayNo\\n interruption\\n completeFlag\\n saleTypeCode\\n \
|
||||||
|
introduction\\n saleText\\n episodeNotices\\n isNew\\n \
|
||||||
|
hasPackRights\\n minimumPrice:
|
||||||
|
minPrice\\n hasMultiplePrices\\n productLineupCodeList\\n is\
|
||||||
|
Purchased\\n purchaseEpisodeLimitday\\n __typename\\n }\\n \
|
||||||
|
publicMainEpisodeCount\\n __typename\\n }\\n webfront_title_relatedTit\
|
||||||
|
les(id: $code) {\\n id\\n groupName\\n titles
|
||||||
|
{\\n ...TitleCard\\n __typename\\n }\\n __typename\\n }\\n \
|
||||||
|
webfront_title_relatedBooks(id: $code, pageSize: 20, page: 1)
|
||||||
|
{\\n pageInfo {\\n results\\n __typename\\n }\\n books
|
||||||
|
{\\n ...BookTitleCard\\n __typename\\n }\\n __typename\\n }\
|
||||||
|
\\n}\\n\\nfragment TitleCard on Title
|
||||||
|
{\\n id\\n titleName\\n isNew\\n catchphrase\\n rate\\n productionYear\
|
||||||
|
\\n updateOfWeek\\n lastEpisode\\n nfreeBadge\\n hasSubtitle\\n hasDub\
|
||||||
|
\\n paymentBadgeList
|
||||||
|
{\\n code\\n __typename\\n }\\n productLineupCodeList\\n thumbnail
|
||||||
|
{\\n standard\\n __typename\\n }\\n hasPackRights\\n hasMultiprice\
|
||||||
|
\\n minimumPrice\\n isOriginal\\n exclusive
|
||||||
|
{\\n isOnlyOn\\n typeCode\\n __typename\\n }\\n __typename\\n}\\n\
|
||||||
|
\\nfragment BookCard on Book {\\n code\\n name\\n mediaType
|
||||||
|
{\\n code\\n __typename\\n }\\n thumbnail
|
||||||
|
{\\n standard\\n __typename\\n }\\n publisher
|
||||||
|
{\\n code\\n name\\n __typename\\n }\\n credits
|
||||||
|
{\\n personCode\\n penName\\n __typename\\n }\\n publishedIn
|
||||||
|
{\\n name\\n __typename\\n }\\n isPreorderable\\n publicStartDateT\
|
||||||
|
ime\\n __typename\\n}\\n\\nfragment BookTitleCard on BookSakuhin
|
||||||
|
{\\n sakuhinCode:
|
||||||
|
code\\n name\\n rate\\n freeBookNum\\n isNew\\n isSale\\n book
|
||||||
|
{\\n ...BookCard\\n __typename\\n }\\n minPrice\\n hasMultiplePric\
|
||||||
|
es\\n totalBookNum\\n isUnextOriginal\\n paymentBadgeList
|
||||||
|
{\\n code\\n __typename\\n }\\n bookViewCode\\n featurePieceCode\\\
|
||||||
|
n subgenreTagList
|
||||||
|
{\\n name\\n __typename\\n }\\n __typename\\n}\\n"
|
||||||
|
title: "query cosmo_getTitle($id: ID!, $episodeCode: ID!, $episodePage: Int,
|
||||||
|
$episodePageSize: Int) {\\n webfront_title_stage(id: $id)
|
||||||
|
{\\n id\\n titleName\\n publishStyleCode\\n episode(id:
|
||||||
|
$episodeCode)
|
||||||
|
{\\n id\\n hasSubtitle\\n hasDub\\n __typename\\n }\
|
||||||
|
\\n __typename\\n }\\n webfront_title_titleEpisodes(\\n id:
|
||||||
|
$id\\n page: $episodePage\\n pageSize: $episodePageSize\\n )
|
||||||
|
{\\n episodes
|
||||||
|
{\\n id\\n episodeName\\n displayNo\\n thumbnail
|
||||||
|
{\\n standard\\n __typename\\n }\\n duration\\n \
|
||||||
|
interruption\\n completeFlag\\n __typename\\n }\\n __typena\
|
||||||
|
me\\n }\\n}\\n"
|
||||||
|
playlist: "query cosmo_getPlaylistUrl($code: String, $playMode: String,
|
||||||
|
$bitrateLow: Int, $bitrateHigh: Int, $validationOnly: Boolean)
|
||||||
|
{\\n webfront_playlistUrl(\\n code: $code\\n playMode:
|
||||||
|
$playMode\\n bitrateLow: $bitrateLow\\n bitrateHigh:
|
||||||
|
$bitrateHigh\\n validationOnly: $validationOnly\\n )
|
||||||
|
{\\n subTitle\\n playToken\\n playTokenHash\\n beaconSpan\\n \
|
||||||
|
result
|
||||||
|
{\\n errorCode\\n errorMessage\\n __typename\\n }\\n r\
|
||||||
|
esultStatus\\n licenseExpireDate\\n urlInfo
|
||||||
|
{\\n code\\n startPoint\\n resumePoint\\n endPoint\\n \
|
||||||
|
endrollStartPosition\\n holderId\\n saleTypeCode\\n sceneSea\
|
||||||
|
rchList
|
||||||
|
{\\n IMS_AD1\\n IMS_L\\n IMS_M\\n IMS_S\\n \
|
||||||
|
__typename\\n }\\n movieProfile
|
||||||
|
{\\n cdnId\\n type\\n playlistUrl\\n movieAudio\
|
||||||
|
List
|
||||||
|
{\\n audioType\\n __typename\\n }\\n licens\
|
||||||
|
eUrlList
|
||||||
|
{\\n type\\n licenseUrl\\n __typename\\n }\
|
||||||
|
\\n __typename\\n }\\n umcContentId\\n movieSecurityL\
|
||||||
|
evelCode\\n captionFlg\\n dubFlg\\n commodityCode\\n mo\
|
||||||
|
vieAudioList
|
||||||
|
{\\n audioType\\n __typename\\n }\\n __typename\\n \
|
||||||
|
}\\n __typename\\n }\\n}\\n"
|
||||||
|
vidtitle: "query cosmo_getVideoTitle($code: ID!) {\\n webfront_title_stage(id:
|
||||||
|
$code)
|
||||||
|
{\\n id\\n titleName\\n rate\\n userRate\\n productionYear\\\
|
||||||
|
n country\\n catchphrase\\n attractions\\n story\\n check\\n \
|
||||||
|
seriesCode\\n seriesName\\n publicStartDate\\n displayPublicEndDate\
|
||||||
|
\\n restrictedCode\\n copyright\\n mainGenreId\\n bookmarkStatus\
|
||||||
|
\\n thumbnail
|
||||||
|
{\\n standard\\n secondary\\n __typename\\n }\\n mainG\
|
||||||
|
enreName\\n isNew\\n exclusiveBadgeCode\\n exclusive
|
||||||
|
{\\n typeCode\\n isOnlyOn\\n __typename\\n }\\n isOrig\
|
||||||
|
inal\\n lastEpisode\\n updateOfWeek\\n nextUpdateDateTime\\n pr\
|
||||||
|
oductLineupCodeList\\n hasMultiprice\\n minimumPrice\\n country\\n \
|
||||||
|
productionYear\\n paymentBadgeList
|
||||||
|
{\\n name\\n code\\n __typename\\n }\\n nfreeBadge\\n \
|
||||||
|
hasDub\\n hasSubtitle\\n saleText\\n currentEpisode
|
||||||
|
{\\n id\\n interruption\\n duration\\n completeFlag\\n \
|
||||||
|
displayDurationText\\n existsRelatedEpisode\\n playButtonName\\n \
|
||||||
|
purchaseEpisodeLimitday\\n __typename\\n }\\n publicMainEpisodeC\
|
||||||
|
ount\\n comingSoonMainEpisodeCount\\n missingAlertText\\n sakuhinN\
|
||||||
|
otices\\n hasPackRights\\n __typename\\n }\\n}\\n"
|
||||||
483
services/VIKI/__init__.py
Normal file
483
services/VIKI/__init__.py
Normal file
@ -0,0 +1,483 @@
|
|||||||
|
import base64
|
||||||
|
import os
|
||||||
|
import datetime
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import click
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from langcodes import Language
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
import hmac
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
from typing import Optional, Union, Generator
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.constants import AnyTrack
|
||||||
|
from devine.core.manifests import DASH
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Chapters, Tracks, Subtitle
|
||||||
|
from devine.core.tracks.attachment import Attachment
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.downloaders import n_m3u8dl_re
|
||||||
|
|
||||||
|
|
||||||
|
class VIKI(Service):
|
||||||
|
"""
|
||||||
|
Service code for Viki
|
||||||
|
Written by ToonsHub, improved by @sp4rk.y
|
||||||
|
|
||||||
|
Authorization: None (Free SD) | Cookies (Free and Paid Titles)
|
||||||
|
Security: FHD@L3
|
||||||
|
"""
|
||||||
|
|
||||||
|
TITLE_RE = r"^(?:https?://(?:www\.)?viki\.com/(?:tv|movies)/)(?P<id>[a-z0-9]+)(?:-.+)?$"
|
||||||
|
# GEOFENCE = ("ca",)
|
||||||
|
|
||||||
|
    @staticmethod
    @click.command(name="VIKI", short_help="https://www.viki.com", help=__doc__)
    @click.argument("title", type=str)
    @click.option("-m", "--movie", is_flag=True, default=False, help="Title is a Movie.")
    @click.pass_context
    def cli(ctx, **kwargs):
        # Click entry point; forwards the title URL/ID and --movie flag to the constructor.
        return VIKI(ctx, **kwargs)
|
||||||
|
|
||||||
|
    def __init__(self, ctx, title: str, movie: bool):
        """Store the input title, infer movie-vs-series, and prime browser-style headers."""
        self.title = title

        # Decide if it's a movie or series: a /movies/ URL always wins over the flag.
        if "/movies/" in self.title:
            self.is_movie = True
        else:
            self.is_movie = movie

        # Fixed Android-TV identity used for the HMAC-signed device-login flow
        # in authenticate().
        self.device_id = "157428845d"
        self.app_id = "100531a"
        self.app_secret_key = "a4e52e9b08620b7131d1830c71cde6cf03c4a7b00d664d9dec8ee27a19d13ba0"

        super().__init__(ctx)

        # Set default "browser" style headers for normal usage
        self.session.headers.update(
            {
                "user-agent": self.config["browser"]["headers"]["user-agent"],
                "x-client-user-agent": self.config["browser"]["headers"]["user-agent"],
                "x-viki-app-ver": self.config["browser"]["headers"]["x-viki-app-ver"],
                "x-viki-as-id": self.config["browser"]["headers"]["x-viki-as-id"],
            }
        )
|
||||||
|
|
||||||
|
    def authenticate(self, cookies: Optional[CookieJar] = None, credential=None):
        """
        Authenticate the session.

        If cookies are supplied they are used directly. Otherwise an interactive
        Android-TV device-login flow runs: request a registration code, wait for
        the user to enter it at viki.com/androidtv, then exchange the device
        token for a session token. The result is cached to disk and reused until
        it expires. Each API call is signed with HMAC-SHA1 over the path+query
        using the app secret key.
        """
        if cookies:
            self.session.cookies.update(cookies)
            return

        cache_file = Path(config.directories.cache / self.__class__.__name__ / "token.json")

        # Check if session cache exists and is still valid
        if os.path.exists(cache_file):
            with open(cache_file, "r") as f:
                session_data = json.load(f)
            if session_data.get("expiry_time") > time.time():
                self.token = session_data["token"]
                self.session.cookies.update(session_data["cookies"])
                self.log.info("Reusing cached session token.")
                return self.token
            else:
                self.log.info("Session expired, re-authenticating.")

        # Step 1: Get device registration code
        login_code_data = {"type": "androidtv", "device_id": self.device_id}
        # Compact separators matter: the JSON body is part of the signed string.
        login_code_data_str = json.dumps(login_code_data, separators=(",", ":"))
        timestamp_login_code = str(round(time.time()))

        code_gen_api = f"/v5/devices.json?app={self.app_id}&t={timestamp_login_code}{login_code_data_str}"

        signature = hmac.new(
            self.app_secret_key.encode("utf-8"), code_gen_api.encode("utf-8"), hashlib.sha1
        ).hexdigest()

        login_code_headers = {
            **self.config["tv_login_headers"],
            "signature": signature,
            "timestamp": timestamp_login_code,
        }

        res = self.session.post(
            self.config["endpoints"]["login_code"], headers=login_code_headers, data=login_code_data_str
        )
        res.raise_for_status()
        result_json = res.json()
        code = result_json["device_registration_code"]
        self.log.info(f"Login Code: {code}")

        # Interactive step: the user must link the device in a browser.
        self.log.info("Go to https://www.viki.com/androidtv and enter the Login code above.")
        input("Press Enter after you have logged in.")

        # Step 2: Verify code
        timestamp_verify_data = str(round(time.time()))
        verify_api_str = (
            f"/v5/devices/{code}.json?device_code={code}&type=androidtv&app={self.app_id}&t={timestamp_verify_data}"
        )

        verify_sig = hmac.new(
            self.app_secret_key.encode("utf-8"), verify_api_str.encode("utf-8"), hashlib.sha1
        ).hexdigest()

        verify_headers = {
            **self.config["tv_login_headers"],
            "signature": verify_sig,
            "timestamp": timestamp_verify_data,
        }

        r_verify = self.session.get(self.config["endpoints"]["verify"].format(code=code), headers=verify_headers)
        r_verify.raise_for_status()
        device_token = r_verify.json()["device_token"]

        # Step 3: Exchange device token for session token
        timestamp_token_api = str(round(time.time()))
        token_api_str = f"/v5/sessions.json?app={self.app_id}&t={timestamp_token_api}"
        token_data = {"device_token": device_token, "type": "androidtv"}
        token_data_str = json.dumps(token_data, separators=(",", ":"))

        token_api_signature = hmac.new(
            self.app_secret_key.encode("utf-8"), token_api_str.encode("utf-8"), hashlib.sha1
        ).hexdigest()

        token_headers = {
            **self.config["tv_login_headers"],
            "signature": token_api_signature,
            "timestamp": timestamp_token_api,
        }

        r_token = self.session.post(
            self.config["endpoints"]["session_token"], headers=token_headers, data=token_data_str
        )
        r_token.raise_for_status()
        token_json = r_token.json()

        self.token = token_json.get("token")
        # Fall back to a 1-hour lifetime if the API omits expiry_time.
        expiry_time = time.time() + token_json.get("expiry_time", 3600)

        # Cache token + cookies so subsequent runs skip the interactive flow.
        cache_file.parent.mkdir(parents=True, exist_ok=True)
        with open(cache_file, "w") as f:
            json.dump({"token": self.token, "expiry_time": expiry_time, "cookies": self.session.cookies.get_dict()}, f)

        self.log.debug(f"Session Token: {self.token} (expires in {token_json.get('expiry_time')} seconds)")
        return self.token
|
||||||
|
|
||||||
|
    def search(self) -> Generator[SearchResult, None, None]:
        """
        Search Viki for the given title string and yield one SearchResult per hit.

        The label shows "TV"/"Movie" plus the distribution year when known; for
        not-yet-released titles it is replaced with a countdown ("In N hours").
        """
        query = self.title
        response = self.session.get(
            self.config["endpoints"]["search_endpoint_url"],
            params={
                "term": query,
                "app": "100000a",
                "per_page": 10,
                "blocked": "true",
            },
        )
        response.raise_for_status()

        search_data = response.json()

        for result in search_data["response"]:
            media_type = "TV" if result["type"] == "series" else "Movie"
            year = None
            # The release year is taken from the first distributor's "from" date.
            distributors = result.get("distributors")
            if distributors:
                from_date = distributors[0].get("from")
                if from_date:
                    year_match = re.match(r"^\d{4}", from_date)
                    if year_match:
                        year = year_match.group()
            label = media_type
            if year:
                label += f" ({year})"

            # Upcoming titles: replace the label with a time-until-release countdown.
            if "viki_air_time" in result:
                release_time = datetime.datetime.fromtimestamp(result["viki_air_time"], datetime.timezone.utc)
                if release_time > datetime.datetime.now(datetime.timezone.utc):
                    time_diff = release_time - datetime.datetime.now(datetime.timezone.utc)
                    days, seconds = time_diff.days, time_diff.seconds
                    hours = days * 24 + seconds // 3600
                    minutes = (seconds % 3600) // 60
                    if hours > 0:
                        label = f"In {hours} hours"
                    elif minutes > 0:
                        label = f"In {minutes} minutes"
                    else:
                        label = "In less than a minute"

            yield SearchResult(
                id_=result["id"],
                title=result["titles"]["en"],
                description=result.get("descriptions", {}).get("en", "")[:200] + "...",
                label=label,
                url=f"https://www.viki.com/tv/{result['id']}",
            )
|
||||||
|
|
||||||
|
    def get_titles(self) -> Union[Movies, Series]:
        """
        Build the Series (paginated episode list) or Movies object for the input title.

        Series path: walks the episode-metadata endpoint page by page, skipping
        geo-blocked unaired episodes and aborting on geo-blocked aired ones.
        Movie path: scrapes the video ID from the movie page HTML, then fetches
        its metadata.
        """
        match = re.match(self.TITLE_RE, self.title)
        if match:
            title_id = match.group("id")
        else:
            # Input was a bare ID rather than a URL.
            title_id = self.title

        if not self.is_movie:
            self.is_movie = False
            episodes = []
            pagenumber = 1
            special_episode_number = 1
            while True:
                series_metadata_url = self.config["endpoints"]["episode_metadata"].format(
                    id=title_id, pagenumber=pagenumber
                )
                series_metadata = self.session.get(series_metadata_url).json()
                # Kept for get_show_year_from_search().
                self.series_metadata = series_metadata

                if not series_metadata["response"] and not series_metadata["more"]:
                    break

                show_year = self.get_show_year_from_search()

                for episode in series_metadata["response"]:
                    # Pad very short IDs with a fixed suffix (stripped again in get_tracks).
                    episode_id = episode["id"]
                    if len(episode_id) < 4:
                        episode_id += "5349"

                    show_title = episode["container"]["titles"]["en"]
                    episode_season = 1
                    episode_number = episode["number"]
                    current_time = int(time.time())
                    geo_blocking = episode.get("blocking", {}).get("geo", False)
                    viki_air_time = episode.get("viki_air_time", 0)

                    if geo_blocking:
                        if current_time < viki_air_time:
                            # Not aired yet; just skip it.
                            continue
                        else:
                            # Aired but geo-blocked: abort the whole run.
                            self.log.info(f"Episode {episode.get('number')} is blocked due to Geo-Location.\n")
                            sys.exit(1)

                    # A trailing 4-digit number is a year; a trailing 1-2 digit
                    # number is a season suffix in the show title.
                    title_match = re.match(r"^(.*?)(?: (\d{4})$| (\d{1,2})$)?", show_title)
                    if title_match:
                        base_title = title_match.group(1)
                        year = title_match.group(2)
                        season = title_match.group(3)
                        if year:
                            show_title = base_title
                        elif season:
                            episode_season = int(season)

                    episode_title_with_year = f"{show_title} {show_year}"
                    # Specials/extras go into season 0 with their own counter.
                    if "Special" in episode.get("titles", {}).get("en", "") or "Extra" in episode.get("titles", {}).get(
                        "en", ""
                    ):
                        episode_season = 0
                        episode_number = special_episode_number
                        special_episode_number += 1

                    episode_name = None
                    episode_class = Episode(
                        id_=episode_id,
                        title=episode_title_with_year,
                        season=episode_season,
                        number=episode_number,
                        name=episode_name,
                        year=show_year,
                        service=self.__class__,
                    )
                    if "images" in episode and "poster" in episode["images"] and "url" in episode["images"]["poster"]:
                        episode_class.thumbnail_url = episode["images"]["poster"]["url"]

                    episodes.append(episode_class)
                pagenumber += 1

            return Series(episodes)

        else:
            # Movie: the video ID only appears inside the page HTML.
            movie_metadata = self.session.get(f"https://www.viki.com/movies/{title_id}").text
            video_id = re.search(r"https://api.viki.io/v4/videos/(.*?).json", movie_metadata).group(1)

            movie_metadata = self.session.get(self.config["endpoints"]["video_metadata"].format(id=video_id)).json()
            # Kept for get_show_year_from_search().
            self.movie_metadata = movie_metadata
            movie_id = movie_metadata["id"]
            movie_name = movie_metadata["titles"]["en"]
            # Strip a trailing year from the movie name if present.
            title_match = re.match(r"^(.*?)(?: (\d{4}))?$", movie_name)
            if title_match:
                base_title = title_match.group(1)
                year = title_match.group(2)
                if year:
                    movie_name = base_title
            movie_year = self.get_show_year_from_search()
            movie_class = Movie(id_=movie_id, name=movie_name, year=movie_year, service=self.__class__)
            movie_class.thumbnail_url = movie_metadata["images"]["poster"]["url"]
            return Movies([movie_class])
|
||||||
|
|
||||||
|
def get_show_year_from_search(self) -> Optional[str]:
    """Resolve the title's release year via the Viki search API.

    Queries the search endpoint with the container's English title and,
    for the result whose id matches ``self.title`` (either directly or via
    the id parsed out of it with ``TITLE_RE``), returns the year from the
    first distributor's "from" date.

    Returns:
        A 4-character year string, or the fallback "2024" when no match
        is found.  # NOTE(review): hard-coded fallback year — confirm intent.
    """

    def _distributor_year(result: dict) -> Optional[str]:
        # The first distributor's "from" date starts with the release year.
        distributors = result.get("distributors")
        if distributors:
            from_date = distributors[0].get("from")
            if from_date:
                return from_date[:4]
        return None

    if hasattr(self, "movie_metadata") and self.movie_metadata:
        query = self.movie_metadata["container"]["titles"]["en"]
    else:
        query = self.series_metadata["response"][0]["container"]["titles"]["en"]

    response = self.session.get(
        self.config["endpoints"]["search_endpoint_url"],
        params={
            "term": query,
            "app": "100000a",
            "per_page": 50,
            "blocked": "true",
        },
    )
    response.raise_for_status()
    search_data = response.json()

    # self.title may be a raw id or a URL; the regex parse is loop-invariant,
    # so do it once instead of per search result.
    match = re.match(self.TITLE_RE, self.title)
    extracted_id = match.group("id") if match else None

    for result in search_data.get("response", []):
        result_id = result.get("id")
        if result_id == self.title or (extracted_id is not None and result_id == extracted_id):
            year = _distributor_year(result)
            if year:
                return year

    return "2024"
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Fetch the DASH manifest and subtitle list for a Viki title.

    Queries the internal video API for the title, loads its MPD into
    devine tracks, attaches the per-track license URL, replaces the
    in-manifest subtitles with the sidecar WebVTT subtitles from
    ``streamSubtitles.dash``, and downloads the poster as an attachment.

    Raises:
        SystemExit: when the episode is not yet available or when no
            (English) subtitles are found.
    """
    # Map Viki's non-standard Chinese codes onto BCP-47 tags.
    chinese_language_map = {
        "zh": "zh-Hans",
        "zt": "zh-TW",
        "zh-tw": "zh-Hant",
        "zh-hk": "zh-Hant",
        "zh-hans": "zh-Hans",
        "zh-hant": "zh-Hant",
    }

    # NOTE(review): ids ending in "5349" appear to carry a 4-char suffix that
    # must be stripped before hitting the video API — confirm against service.
    original_id = title.id[:-4] if title.id.endswith("5349") else title.id
    mpd_info = self.session.get(self.config["endpoints"]["mpd_api"].format(id=original_id))
    mpd_data = mpd_info.json()

    # queue[1] is assumed to be the DASH entry; missing means not yet released.
    try:
        mpd_url = mpd_data["queue"][1]["url"]
    except (KeyError, IndexError):
        self.log.info("Episode not yet available\n")
        sys.exit(1)

    # Normalise the origin language the same way as the subtitle codes below.
    mpd_lang = mpd_data["video"]["origin"]["language"].lower()
    if mpd_lang == "pt":
        mpd_lang_mapped = "pt-br"
    elif mpd_lang in chinese_language_map:
        mpd_lang_mapped = chinese_language_map[mpd_lang]
    else:
        mpd_lang_mapped = mpd_lang

    # "drm" is a base64-encoded JSON blob; "dt3" holds the Widevine license URL.
    license_url = json.loads(base64.b64decode(mpd_data["drm"]).decode("utf-8", "ignore"))["dt3"]
    tracks = DASH.from_url(url=mpd_url).to_tracks(language=mpd_lang_mapped)

    # Stash the license URL on every track for get_widevine_license().
    for track in tracks:
        track.data["license_url"] = license_url

    # Force audio tracks to the mapped origin language, keeping the manifest
    # language around for reference.
    # NOTE(review): `Language` is not among this chunk's visible imports —
    # confirm it is imported (langcodes) at the top of this file.
    for track in tracks.audio:
        track.data["original_language"] = track.language
        track.language = Language.make(language=mpd_lang_mapped)

    # Manifest subtitles are discarded; sidecar VTTs are added below instead.
    tracks.subtitles.clear()

    def clean_language_label(label: str) -> str:
        # Strip HTML tags and "(NN%)" completion markers from Viki labels.
        label = re.sub(r"<[^>]+>", "", label)
        label = re.sub(r"\s*\(\d+%\)", "", label)
        return label.strip()

    # Download thumbnail if available
    if hasattr(title, "thumbnail_url") and title.thumbnail_url:
        thumbnail_url = title.thumbnail_url
        thumbnail_name = f"{mpd_data['video']['container']['titles']['en']} Episode {mpd_data['video']['number']} thumbnail"

        try:
            # Use Attachment.from_url to create and download the thumbnail
            # NOTE(review): `Attachment` is not among this chunk's visible
            # imports — confirm it is imported from devine.core.tracks.
            thumbnail_attachment = Attachment.from_url(
                url=thumbnail_url,
                name=thumbnail_name,
                mime_type="image/jpeg",
                description="Thumbnail",
                session=self.session,
            )
            if not hasattr(tracks, "attachments"):
                tracks.attachments = []
            tracks.attachments.append(thumbnail_attachment)
        except Exception as e:
            self.log.warning(f"Failed to download thumbnail: {e}")
    else:
        self.log.warning("Thumbnail URL not available for this title.")

    # Handle subtitles
    stream_subtitles = mpd_data.get("streamSubtitles", {}).get("dash", [])
    if not stream_subtitles:
        self.log.warning("No subtitles available in 'streamSubtitles.dash'.")
    else:
        for sub in stream_subtitles:
            # Only accept near-complete (>95%) real subtitle tracks.
            if sub.get("percentage", 0) > 95 and sub.get("kind") == "subtitles":
                language_code_raw = sub.get("srclang", "").lower()
                language_label = sub.get("label", language_code_raw)
                language_name = clean_language_label(language_label)

                # Chinese variants get both a mapped tag and a script label.
                if language_code_raw.startswith("z"):
                    language_code_mapped = chinese_language_map.get(language_code_raw, language_code_raw)
                    script = "Simplified" if language_code_mapped == "zh-Hans" else "Traditional"
                    language_name = f"Chinese ({script})"
                elif language_code_raw == "pt":
                    language_code_mapped = "pt-br"
                else:
                    language_code_mapped = language_code_raw

                is_original = language_code_mapped == mpd_lang_mapped
                subtitle_id = f"{title.id}_{sub.get('id', '')}_{language_code_mapped}"

                subtitle_track = Subtitle(
                    id_=subtitle_id,
                    url=sub["src"],
                    codec=Subtitle.Codec.WebVTT,
                    language=language_code_mapped,
                    is_original_lang=is_original,
                    forced=False,
                    sdh=False,
                    name=language_name,
                )
                if sub.get("default"):
                    subtitle_track.default = True
                tracks.add(subtitle_track, warn_only=True)

    # Hard requirements: at least one subtitle and an English one.
    if not tracks.subtitles:
        self.log.error("No Subtitles")
        sys.exit(1)
    if not any(sub.language.language == "en" for sub in tracks.subtitles):
        self.log.error("No English Subtitles")
        sys.exit(1)

    # NOTE(review): `n_m3u8dl_re` is not defined in this chunk — presumably
    # a downloader imported at file top; verify before relying on it.
    for track in tracks:
        if track not in tracks.attachments:
            track.downloader = n_m3u8dl_re

    return tracks
||||||
|
def get_chapters(self, *_, **__) -> Chapters:
    """This service exposes no chapter markers; always return an empty set."""
    empty_chapters = Chapters()
    return empty_chapters
||||||
|
def get_widevine_service_certificate(self, challenge: bytes, track: AnyTrack, *_, **__) -> bytes | str:
    """Return the service certificate for Widevine.

    The license endpoint also answers certificate-request challenges,
    so this simply delegates to :meth:`get_widevine_license`.
    """
    return self.get_widevine_license(challenge, track)
||||||
|
def get_widevine_license(self, challenge: bytes, track: AnyTrack, *_, **__) -> bytes:
    """POST the Widevine challenge to the track's license URL and return the raw license bytes."""
    license_url = track.data["license_url"]
    response = self.session.post(url=license_url, data=challenge)
    return response.content
|
||||||
26
services/VIKI/config.yaml
Normal file
26
services/VIKI/config.yaml
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
endpoints:
|
||||||
|
episode_metadata: "https://api.viki.io/v4/containers/{id}/episodes.json?direction=dsc&with_upcoming=true&blocked=true&sort=number&page={pagenumber}&per_page=100&app=100531a"
|
||||||
|
video_metadata: "https://api.viki.io/v4/videos/{id}.json?app=100531a"
|
||||||
|
mpd_api: "https://www.viki.com/api/videos/{id}"
|
||||||
|
search_endpoint_url: "https://api.viki.io/v4/search.json"
|
||||||
|
|
||||||
|
manifest: "https://api.viki.io/v5/playback_streams/{contentID}.json?drms=dt3&device_id={deviceID}&app=100531a&token={token}"
|
||||||
|
movie_title: "https://api.viki.io/v4/films/{contentID}.json?film_id={contentID}&app=100531a&token={token}"
|
||||||
|
series_title: "https://api.viki.io/v4/series/{contentID}/episodes.json?&page={page}&per_page=24&with_paging=true&app=100531a&token={token}"
|
||||||
|
get_license_api: "https://api.viki.io/v5/videos/{contentID}/drms.json?offline=false&stream_ids={stream_id}&dt=dt3&device_id={deviceID}&app=100531a&token={token}"
|
||||||
|
login_code: "https://api.viki.io/v5/devices.json?app=100531a"
|
||||||
|
verify: "https://api.viki.io/v5/devices/{code}.json?device_code={code}&type=androidtv&app=100531a"
|
||||||
|
session_token: "https://api.viki.io/v5/sessions.json?app=100531a"
|
||||||
|
|
||||||
|
browser:
|
||||||
|
headers:
|
||||||
|
user-agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36"
|
||||||
|
x-viki-app-ver: "14.10.0"
|
||||||
|
x-viki-as-id: "100000a-1709757058757-0fb4be98-a04e-47b2-a80b-2dfe75cc6376"
|
||||||
|
|
||||||
|
# New separate headers for TV device login
|
||||||
|
tv_login_headers:
|
||||||
|
accept: "*/*"
|
||||||
|
user-agent: "okhttp/4.10.0"
|
||||||
|
x-viki-app-ver: "23.5.0"
|
||||||
|
host: "api.viki.io"
|
||||||
814
services/VIU/__init__.py
Normal file
814
services/VIU/__init__.py
Normal file
@ -0,0 +1,814 @@
|
|||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import random
|
||||||
|
import uuid
|
||||||
|
import re
|
||||||
|
import urllib.parse
|
||||||
|
import click
|
||||||
|
import sys
|
||||||
|
import langcodes
|
||||||
|
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.tracks import Tracks, Subtitle, Attachment
|
||||||
|
from devine.core.utilities import is_close_match
|
||||||
|
from devine.core.manifests import HLS
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
|
|
||||||
|
class VIU(Service):
|
||||||
|
"""
|
||||||
|
Service code for VIU streaming service (https://viu.com).
|
||||||
|
|
||||||
|
Authorization: Username-Password, None
|
||||||
|
Security: HD@L3, NonDRM doesn't seem to care about releases.
|
||||||
|
|
||||||
|
VIU has some regions supported:
|
||||||
|
- 1: ID, MY
|
||||||
|
- 2: SG, HK, TH, PH
|
||||||
|
1 & 2 has different api
|
||||||
|
|
||||||
|
Author: unnamed improved by @sp4rk.y
|
||||||
|
last update: 2025-01-07
|
||||||
|
notes: Works fine for any region, but doesn't work on movies yet
|
||||||
|
"""
|
||||||
|
|
||||||
|
# GEOFENCE = ("sg","id","my","hk")
|
||||||
|
|
||||||
|
# Static method: this belongs to the class rather than to an instance.
@staticmethod
# The command name must match the service tag (and, by extension, the service folder).
@click.command(name="VIU", short_help="https://viu.com", help=__doc__)
@click.argument("title", type=str)
@click.option("-l", "--lang", default="kor", help="Specify language for metadata")
@click.option("-nt", "--notitle", is_flag=True, default=False, help="Dont grab episode title.")
@click.option("-q", "--quality", type=int, default=1080, help="Desired quality (e.g., 1080, 720, 480, 240)")
@click.pass_context
def cli(ctx, **kwargs):
    # devine entry point: instantiate the service with the parsed CLI options.
    return VIU(ctx, **kwargs)
||||||
|
def __init__(self, ctx, title, lang: str, notitle: bool, quality: int):
    """Initialise the VIU service from the CLI arguments.

    Parses the content id and region out of the title URL and selects the
    region-specific API constants used by later requests.
    """
    self.url = title
    self.title = self.parse_input(title)
    # The region code is the 5th path segment of the URL (e.g. .../ott/<region>/...).
    self.region = title.split("/")[4]
    self.notitle = notitle
    self.lang = lang
    self.token = ""
    self._auth_codes = {}
    self._user_token = None
    self.quality = quality
    self.ctx = ctx

    # Per-region API constants, in the order:
    # (lang_flag_id, lang_flag_id_2, area_id, os_flag_id, os_flag_id_2, ut)
    region_settings = {
        "id": (3, 8, 1000, 1, 2, 0),
        "my": (3, 7, 1001, 1, 2, 0),
        "sg": (3, 3, 2, 1, 2, 0),
        "ph": (3, 3, 5, 1, 1, 2),
        "th": (4, 4, 4, 1, 1, 0),
        "za": (3, 3, 1006, 1, 1, 0),
        "hk": (3, 3, 1, 1, 1, 0),
    }
    settings = region_settings.get(self.region)
    # Unknown regions leave these attributes unset, exactly as the original
    # if/elif chain did.
    if settings:
        (
            self.lang_flag_id,
            self.lang_flag_id_2,
            self.area_id,
            self.os_flag_id,
            self.os_flag_id_2,
            self.ut,
        ) = settings

    # NOTE(review): this unconditionally overrides the --lang option above.
    self.lang = "kr"
    super().__init__(ctx)
|
||||||
|
def authenticate(
    self,
    cookies: Optional[CookieJar] = None,
    credential: Optional[Credential] = None,
) -> None:
    """Prepare the HTTP session for (anonymous) downloading.

    Stores the optional credential for later use and sets the Referer
    header required by the CDN.
    """
    self.credentials = credential
    # Sending an Origin header provokes a 403 from the API, so only a
    # Referer is configured here.
    referer_header = {"Referer": "https://viu.com/"}
    self.session.headers.update(referer_header)
    self.log.info(" + Downloading without an account")
|
|
||||||
|
def _fetch_metadata(self):
    """Scrape series/product metadata from the title page's Next.js payload.

    Loads the title URL, parses the ``__NEXT_DATA__`` JSON script tag and
    walks its "fallback" map for the ``@"PRODUCT_ALT_LANG_LIST`` entry,
    from which the product matching this region/language is selected.

    Side effects: sets ``self.series_id``, ``self.product_id`` and
    ``self.main_title`` on the first matching product.  If nothing
    matches, these attributes are left unset.
    """
    page = self.session.get(self.url, allow_redirects=True)
    soup = BeautifulSoup(page.text, "html.parser")

    for tag in soup.find_all("script", {"id": "__NEXT_DATA__"}):
        try:
            data = json.loads(tag.text)
            fallback_data = data["props"]["pageProps"].get("fallback", {})

            # Search through fallback keys to find the one that starts with @"PRODUCT_ALT_LANG_LIST"
            for key, val in fallback_data.items():
                if key.startswith('@"PRODUCT_ALT_LANG_LIST'):
                    products = val["data"]["product"]
                    # Match the product to the region constants chosen in
                    # __init__ (API returns them as strings).
                    for p in products:
                        if p["area_id"] == str(self.area_id) and p["language_flag_id"] == str(self.lang_flag_id):
                            self.series_id = p["series_id"]
                            self.product_id = p["product_id"]
                            self.main_title = p["series_name"]
                            break
                    # Only the first PRODUCT_ALT_LANG_LIST entry is considered.
                    break

        except (KeyError, json.JSONDecodeError):
            # Malformed or unexpected payloads are ignored; the next script
            # tag (if any) is tried instead.
            pass
|
|
||||||
|
def get_titles(self) -> Titles_T:
    """Build the devine title listing (Movies or Series) for the input URL.

    Fetches the product list for the series via the region-appropriate
    gateway, detects whether the content is a movie or a series, and
    returns either a single Movie or one Episode per product.

    Raises:
        SystemExit: when any of the API requests fail to parse.
    """
    res = self.session.get(url=self.url, allow_redirects=True)
    try:
        # The numeric content id sits between "/vod/" and the next slash.
        vod_id = self.url.split("/vod/")[1].split("/")[0]
        self.title = vod_id
        self.log.info(f" + Region: {self.region}")
        self.log.debug(f" + Area_id: {self.area_id}")
        self.log.debug(f" + Language_flag_id: {self.lang_flag_id}")
    except Exception:
        self.log.exit(f" - Error, response: {res.text}")

    # Populates self.series_id / self.product_id / self.main_title.
    self._fetch_metadata()

    # NOTE(review): the two branches below differ only in the spoofed
    # X-Forwarded-For address and the error messages — candidates for
    # deduplication.
    if self.region.lower() == "id":
        self.session.headers.update({"X-Forwarded-For": "139.195.232.194"})
        meta_res = self.session.get(
            url=self.config["endpoints"]["playlist"],
            headers={
                "authority": "api-gateway-global.viu.com",
                "accept": "application/json, text/plain, */*",
                "accept-language": "en-US,en;q=0.9",
                "authorization": f"Bearer {self.token}",
                "origin": "https://www.viu.com",
                "referer": "https://www.viu.com/",
            },
            params={
                "platform_flag_label": "web",
                "area_id": self.area_id,
                "language_flag_id": self.lang_flag_id,
                "platformFlagLabel": "web",
                "areaId": self.area_id,
                "languageFlagId": self.lang_flag_id,
                "ut": "0",
                "countryCode": self.region,
                "r": "/vod/product-list",
                "os_flag_id": self.os_flag_id,
                "series_id": self.series_id,
                "size": "-1",
                "sort": "desc",
            },
        )
        try:
            data = meta_res.json()["data"]
            movie_data_list = data.get("product_list", [])
        except Exception as e:
            self.log.error(f" - Error in region-specific request: {e}")
            self.log.info(f" - Response: {meta_res.text}")
            sys.exit()
    else:
        self.session.headers.update({"X-Forwarded-For": "103.62.48.237"})
        r = self.session.get(
            url=self.config["endpoints"]["playlist"],
            headers={
                "authority": "api-gateway-global.viu.com",
                "accept": "application/json, text/plain, */*",
                "accept-language": "en-US,en;q=0.9",
                "authorization": f"Bearer {self.token}",
                "origin": "https://www.viu.com",
                "referer": "https://www.viu.com/",
            },
            params={
                "platform_flag_label": "web",
                "area_id": self.area_id,
                "language_flag_id": self.lang_flag_id,
                "platformFlagLabel": "web",
                "areaId": self.area_id,
                "languageFlagId": self.lang_flag_id,
                "ut": "0",
                "countryCode": self.region,
                "r": "/vod/product-list",
                "os_flag_id": self.os_flag_id,
                "series_id": self.series_id,
                "size": "-1",
                "sort": "desc",
            },
        )
        try:
            data = r.json()["data"]
            movie_data_list = data["product_list"]
        except Exception:
            self.log.info(f" - Error, response: {r.text}")
            sys.exit()

    data = None
    content_type = 0

    # Get the last item in the movie_data_list for backward compatibility
    if movie_data_list:
        data = movie_data_list[-1]
        content_type = int(data.get("is_movie", 0))
    # Region groups use different detail endpoints.
    if self.region in ["id", "my"]:
        stream_info = self.session.get(
            url=self.config["endpoints"]["playlist"],
            params={
                "platform_flag_label": "web",
                "platformFlagLabel": "web",
                "area_id": self.area_id,
                "areaId": self.area_id,
                "language_flag_id": self.lang_flag_id,
                "languageFlagId": self.lang_flag_id,
                "r": "/vod/detail",
                "countryCode": self.region,
                "ut": "0",
                "product_id": data["product_id"],
                "os_flag_id": self.os_flag_id,
            },
        ).json()["data"]
    else:
        stream_info = self.session.get(
            url=self.config["endpoints"]["ott"].format(region=self.region),
            params={
                "area_id": self.area_id,
                "language_flag_id": self.lang_flag_id,
                "r": "vod/ajax-detail",
                "platform_flag_label": "web",
                "product_id": data["product_id"],
            },
        ).json()["data"]

    self.lang = stream_info["series"].get("series_language", "ko")

    self.log.info(f" + Detected language: {self.lang}")

    product_type = "movie" if content_type == 1 else "series"
    self.log.info(f" + Product type: {product_type}")

    if product_type == "movie":
        # NOTE(review): this loop has no effect beyond raising KeyError if
        # "product_id" is missing from any entry — confirm intent.
        for x in movie_data_list:
            x["product_id"]

        r = self.session.get(
            url=self.config["endpoints"]["playlist"],
            headers={
                "authority": "api-gateway-global.viu.com",
                "accept": "application/json, text/plain, */*",
                "accept-language": "en-US,en;q=0.9",
                "authorization": f"Bearer {self.token}",
                "origin": "https://www.viu.com",
                "referer": "https://www.viu.com/",
            },
            params={
                "platform_flag_label": "web",
                "area_id": self.area_id,
                "language_flag_id": self.lang_flag_id,
                "platformFlagLabel": "web",
                "areaId": self.area_id,
                "languageFlagId": self.lang_flag_id,
                "countryCode": self.region,
                "ut": "0",
                "r": "/vod/detail",
                "product_id": self.product_id,
                "os_flag_id": self.os_flag_id,
            },
        )
        try:
            movie_data = r.json()["data"]
        except Exception:
            self.log.info(f" - Error, response: {r.text}")
            sys.exit()
        try:
            # First check if release_of_year exists in the data
            year = movie_data["series"].get("release_of_year")
            if not year:
                # If not available, try to extract from the series name
                year_match = re.search(r"(\d{4})", movie_data["series"]["name"])
                if year_match:
                    year = year_match.group(1)
        except Exception:
            year = None
        return Movies(
            [
                Movie(
                    id_=movie_data["current_product"]["product_id"],
                    service=self.__class__,
                    year=year,
                    name=movie_data["series"]["name"],
                    language=self.lang,
                    data=movie_data,
                )
            ]
        )
    else:
        titles_ = []
        raw_title = stream_info["series"]["name"]

        # First check for explicit "Season X" text
        season_match = re.search(r"(Season\s+(\d+))", raw_title, re.IGNORECASE)
        if season_match:
            season_number = int(season_match.group(2))
            series_name = raw_title.split(season_match.group(1))[0].strip()
        else:
            # Then check for number at end of title
            number_match = re.search(r"(\d+)$", raw_title.strip())
            if number_match:
                season_number = int(number_match.group(1))
                series_name = raw_title[: -(len(number_match.group(1)))].strip()
            else:
                season_number = 1
                series_name = raw_title

        # Get year from release_of_year or fallback to release_time
        year_val = stream_info["series"].get("release_of_year")
        if not year_val:
            release_time = int(stream_info["series"].get("release_time", 0))
            year_val = datetime.datetime.fromtimestamp(release_time, tz=datetime.timezone.utc).year

        # One Episode per product, sorted by episode number ascending.
        for x in sorted(movie_data_list, key=lambda x: int(x.get("number", 0))):
            series_name_with_year = f"{series_name} {year_val}" if year_val else series_name
            titles_.append(
                Episode(
                    id_=x["ccs_product_id"],
                    title=series_name_with_year,
                    year=year_val,
                    season=season_number,
                    number=int(x.get("number", 0)),
                    service=self.__class__,
                    language=self.lang,
                    # The synopsis appears to embed the episode name after a
                    # dash — TODO confirm this format holds for all regions.
                    name=x.get("synopsis").split("-")[-1].strip(),
                    data=x,
                )
            )

        return Series(titles_)
|
|
||||||
|
def get_tracks(self, title: Title_T) -> Tracks:
    """Resolve the HLS streams, subtitles, and thumbnail for a title.

    Obtains a playback token (retrying via login or a preview-duration
    bypass), picks the highest available resolution (optionally filtered by
    the --quality option), loads its HLS playlist into tracks, attaches
    sidecar SRT subtitles and a thumbnail attachment.

    Raises:
        ValueError: when no product_id can be found in the title data.
        SystemExit: when no stream info or no (English) subtitles exist.
    """
    tracks = Tracks()
    data = title.data

    # id/my titles carry the detail payload already; other regions fetch it.
    if self.region in ["id", "my"]:
        stream_info = {
            "current_product": data,
            "time_duration": data.get("time_duration", ""),
        }
    else:
        product_id = data.get("current_product", {}).get("product_id") or data.get("product_id")
        if product_id is None:
            raise ValueError("product_id not found in the provided data")

        stream_info = self.session.get(
            url=self.config["endpoints"]["ott"].format(region=self.region),
            params={
                "area_id": self.area_id,
                "language_flag_id": self.lang_flag_id,
                "r": "vod/ajax-detail",
                "platform_flag_label": "web",
                "product_id": product_id,
            },
        ).json()["data"]

    # The ccs id can be nested one level deeper depending on the payload shape.
    ccs_id = stream_info.get("current_product", {}).get("ccs_product_id") or stream_info.get(
        "current_product", {}
    ).get("current_product", {}).get("ccs_product_id")

    query = {
        "ccs_product_id": ccs_id,
        "platform_flag_label": "phone",
        "language_flag_id": self.lang_flag_id_2,
        "ut": self.ut,
        "area_id": self.area_id,
        "os_flag_id": self.os_flag_id_2,
        "countryCode": self.region,
    }
    duration_limit = False

    def download_playback():
        # Closure over `query`, which may be mutated below before retries.
        stream_data = self.session.get(
            url=self.config["endpoints"]["playback"],
            params=query,
            headers={
                "Authorization": f"Bearer {self._auth_codes[self.region]}",
                "connection": "Keep-Alive",
                "content-type": "application/json",
                "host": "api-gateway-global.viu.com",
                "platform": "android",
                "user-agent": "okhttp/3.12.1",
            },
        ).json()
        return self.check_error(stream_data).get("stream")

    if not self._auth_codes.get(self.region):
        self._auth_codes[self.region] = self._get_token(self.region)

    self.log.debug(f" + Token play: {self._auth_codes[self.region]}")

    stream_data = None
    try:
        stream_data = download_playback()
    except (Exception, KeyError):
        # First attempt failed — try a logged-in token, or fall back to the
        # 3-minute preview bypass for VIP-only content.
        token = self._login(self.region)
        self.log.debug(f" + Token login: {token}")
        if token is not None:
            query["identity"] = token
        else:
            # The content is Preview or for VIP only.
            # We can try to bypass the duration which is limited to 3mins only
            duration_limit, query["duration"] = True, "180"
        try:
            stream_data = download_playback()
        except (Exception, KeyError):
            if token is None:
                raise
            self.log.exit(
                " - Login required, needs password, detected:"
                f"\nuser: {self.credentials.username}\npwd: {self.credentials.password}"
            )
    if not stream_data:
        self.log.exit(" - Cannot get stream info")

    formats = []
    # Region groups publish their stream URL maps under different keys.
    stream_key = "airplayurl" if self.region in ["id", "my"] else "airplayurl2"
    stream_urls = stream_data.get(stream_key, {})

    for vid_format, stream_url in stream_urls.items():
        # Format ids look like "s1080p"; extract the vertical resolution.
        height_match = re.search(r"s(\d+)p", vid_format)
        height = int(height_match.group(1)) if height_match else None

        # Bypass preview duration limit
        if duration_limit:
            # NOTE(review): this rebinds `query` (the playback-params dict) to
            # the URL's query dict — harmless after the requests above, but
            # confusing; consider a separate name.
            old_stream_url = urllib.parse.urlparse(stream_url)
            query = dict(urllib.parse.parse_qsl(old_stream_url.query, keep_blank_values=True))
            query.update(
                {
                    "duration": stream_info.get("time_duration") or "9999999",
                    "duration_start": "0",
                }
            )
            stream_url = old_stream_url._replace(query=urllib.parse.urlencode(query)).geturl()

        formats.append({"format_id": vid_format, "url": stream_url, "height": height})

    if self.ctx.params.get("quality"):
        requested_qualities = self.ctx.params["quality"]

        if not isinstance(requested_qualities, (list, set)):
            requested_qualities = [requested_qualities]

        # Accept either the height itself or its 16:9 width equivalent.
        formats = [
            fmt
            for fmt in formats
            if fmt["height"] in requested_qualities or int(fmt["height"] * (9 / 16)) in requested_qualities
        ]

    # NOTE(review): `self.formats` is never assigned anywhere visible — this
    # fallback would raise AttributeError; confirm against the full file.
    if not formats:
        formats = self.formats

    highest_resolution_format = max(formats, key=lambda x: x["height"])
    url = highest_resolution_format["url"]
    # "_var_" playlists are variant master lists; strip to get the media list.
    # TODO confirm this URL rewrite against the CDN's actual naming scheme.
    if "_var_" in url:
        url = url.replace("_var_", "_")

    tracks.add(
        HLS.from_url(url=url, session=self.session).to_tracks(language=self.lang),
        warn_only=True,
    )

    # Sidecar subtitles: only fetched when the HLS playlist contained none.
    if not tracks.subtitles:
        try:
            # Get product_id from either current_product or directly from data
            product_id = data.get("current_product", {}).get("product_id") or data.get("product_id")
            if not product_id:
                self.log.error("Cannot find product_id in data")
                raise ValueError("product_id not found")

            subs_resp = self.session.get(
                url=self.config["endpoints"]["playlist"],
                params={
                    "r": "/vod/detail",
                    "product_id": product_id,
                    "platform_flag_label": "phone",
                    "language_flag_id": self.lang_flag_id,
                    "ut": self.ut,
                    "area_id": self.area_id,
                    "os_flag_id": self.os_flag_id,
                    "countryCode": self.region,
                },
                headers={
                    "authorization": f"Bearer {self.token}",
                    "connection": "Keep-Alive",
                    "content-type": "application/json",
                    "host": "api-gateway-global.viu.com",
                    "platform": "android",
                    "user-agent": "okhttp/3.12.1",
                },
            ).json()
            subs = subs_resp["data"]["current_product"]["subtitle"]
        # NOTE(review): bare except silently swallows all errors (incl.
        # KeyboardInterrupt) before falling back to the product-list endpoint.
        except:  # noqa: E722
            subs_resp = self.session.get(
                url=self.config["endpoints"]["playlist"],
                params={
                    "platform_flag_label": "web",
                    "area_id": self.area_id,
                    "language_flag_id": self.lang_flag_id_2,
                    "platformFlagLabel": "web",
                    "areaId": self.area_id,
                    "languageFlagId": self.lang_flag_id_2,
                    "countryCode": self.region.upper(),
                    "ut": self.ut,
                    "r": "/vod/product-list",
                    "os_flag_id": self.os_flag_id_2,
                    "series_id": data["series"]["series_id"],
                    "size": "-1",
                    "sort": "desc",
                },
                headers={
                    "authorization": f"Bearer {self.token}",
                    "connection": "Keep-Alive",
                    "content-type": "application/json",
                    "host": "api-gateway-global.viu.com",
                    "platform": "android",
                    "user-agent": "okhttp/3.12.1",
                },
            ).json()
            subs = subs_resp["data"]["current_product"]["subtitle"]
        for x in subs:
            # Main subtitle
            main_subtitle = Subtitle(
                id_=f"{x['product_subtitle_id']}_{x['code']}",
                url=x["url"],
                codec=Subtitle.Codec.SubRip,
                language=x["code"],
                is_original_lang=is_close_match(x["code"], [title.language]),
                forced=False,
                sdh=False,
            )
            tracks.add(main_subtitle, warn_only=True)

            # Annotation/secondary subtitle
            if x.get("second_subtitle_url"):
                annotation_subtitle = Subtitle(
                    id_=f"{x['product_subtitle_id']}_{x['code']}_annotation",
                    url=x["second_subtitle_url"],
                    codec=Subtitle.Codec.SubRip,
                    language=x["code"],
                    is_original_lang=is_close_match(x["code"], [title.language]),
                    forced=False,
                    sdh=True,
                )
                tracks.add(annotation_subtitle, warn_only=True)

    # Update subtitle track names based on their language
    for track in tracks.subtitles:
        try:
            if not track.name or len(track.name) == 0:
                if track.language:
                    # Get the full language name for the subtitle track
                    lang_name = track.language.display_name()

                    # Add territory name if available
                    if track.language.territory:
                        territory_name = track.language.territory_name()
                        track.name = f"{lang_name}, {territory_name}"
                    else:
                        track.name = lang_name

                    # Add annotation/sdh indicator
                    if track.sdh:
                        track.name += " [Annotations]"
        except Exception as e:
            self.log.warning(f"Failed to update subtitle name for {track.language}: {e}")

    # Hard requirements: at least one subtitle and an English one.
    if not tracks.subtitles:
        self.log.error("No Subtitles")
        sys.exit(1)
    if not any(sub.language.language == "en" for sub in tracks.subtitles):
        self.log.error("No English Subtitles")
        sys.exit(1)

    # Thumbnail: several payload shapes are tried in order of preference.
    thumbnail_url = data.get("cover_image_url") or data.get("poster") or data.get("thumb")

    if not thumbnail_url and isinstance(data.get("current_product"), dict):
        thumbnail_url = data["current_product"].get("cover_image_url")

    if not thumbnail_url:
        images = data.get("images", [])
        if images:
            thumbnail_data = images[0]
            thumbnail_url = thumbnail_data.get("url")

    if thumbnail_url:
        # Get synopsis from data or current_product, fallback to title
        synopsis = None
        if isinstance(data.get("current_product"), dict) and data["current_product"].get("synopsis"):
            synopsis = data["current_product"]["synopsis"]
        elif data.get("synopsis"):
            synopsis = data["synopsis"]
        else:
            synopsis = getattr(title, "name", "unknown")

        # Sanitize the filename by replacing problematic characters
        sanitized_synopsis = re.sub(r"[/\\]", "_", synopsis)
        thumbnail_name = f"{sanitized_synopsis} thumbnail"

        try:
            thumbnail_attachment = Attachment.from_url(
                url=thumbnail_url,
                name=thumbnail_name,
                mime_type="image/avif",
                description="Thumbnail",
                session=self.session,
            )
            tracks.attachments.append(thumbnail_attachment)
        except Exception as e:
            self.log.warning(f"Failed to download thumbnail: {e}")
    else:
        self.log.warning("Thumbnail not found for title.")

    # Video tracks from the playlist may lack a usable language tag.
    for video in tracks.videos:
        if not video.language.is_valid():
            video.language = langcodes.Language.get(self.lang)

    return tracks
|
|
||||||
|
def get_chapters(self, title):
    """This service exposes no chapter data; always return an empty list."""
    return []
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, title: Title_T, **_: Any) -> bytes:
    """POST a Widevine challenge to the license endpoint and return the raw response body.

    Authorization falls back to the service's static config token when no
    per-session license token (``self.token_lic``) has been obtained.
    """
    return self.session.post(
        url=self.config["endpoints"]["license"].format(id=title.id),
        headers={
            # Per-session license token, else the static auth value from config.
            "authorization": self.token_lic or self.config["auth"],
            "actiontype": "s",
            # Client advertises itself as an L3 browser player with no HDCP.
            "drm_level": "l3",
            "hdcp_level": "null",
            "lang_id": "en",
            "languageid": "en",
            "os_ver": "10",
            "x-client": "browser",
            # Fresh request id per call; session id persists across calls.
            "x-request-id": str(uuid.uuid4()),
            "x-session-id": self.sessionid,
        },
        data=challenge,
    ).content
|
||||||
|
|
||||||
|
def parse_input(self, input_):
    """Classify a title URL/slug and extract its numeric identifier.

    Sets ``self.jenis`` to the detected input kind ("product_id",
    "playlist_id" or "playlist_id_eps") as a side effect, and returns
    the extracted ID string.
    """
    product_match = re.search(r"vod\/(\d+)\/", input_)
    # A playlist can be identified either by a "playlist-<id>" path segment
    # or by a "containerId=<id>" query parameter.
    playlist_match = re.search(r".+playlist-(\d+)", input_) or re.search(r"containerId=(\d+)", input_)

    if product_match:
        self.jenis = "product_id"
        return product_match.group(1)
    if playlist_match:
        self.jenis = "playlist_id"
        return playlist_match.group(1)
    # Fall back to treating the input as an episode slug ending in "-<id>".
    self.jenis = "playlist_id_eps"
    return input_.split("-")[-1]
|
||||||
|
|
||||||
|
def check_error(self, response):
    """Validate an API response envelope.

    A positive status code signals a service-side failure: log it and
    raise. Otherwise hand back the payload's "data" mapping, or an
    empty dict when it is absent or null.
    """
    status = response.get("status", {})
    code = status.get("code", 0)
    if code <= 0:
        return response.get("data") or {}
    message = status.get("message", "Unknown error")
    self.log.error(f" - API Error: Code {code} - {message} - Attempting to bypass")
    raise Exception(f"API Error: {message}")
|
||||||
|
|
||||||
|
def get_token(self):
    """Create a fresh device/session identity and request an anonymous API token.

    Stores new ``self.sessionid`` / ``self.deviceid`` UUIDs as a side effect
    and returns the token string from the identity endpoint.
    """
    self.sessionid = str(uuid.uuid4())
    self.deviceid = str(uuid.uuid4())
    res = self.session.post(
        url=self.config["endpoints"]["token"],
        params={
            "ver": "1.0",
            "fmt": "json",
            "aver": "5.0",
            "appver": "2.0",
            # Impersonates the desktop web client.
            "appid": "viu_desktop",
            "platform": "desktop",
            # Install id: a fresh UUID per request.
            "iid": str(uuid.uuid4()),
        },
        headers={
            "accept": "application/json; charset=utf-8",
            "content-type": "application/json; charset=UTF-8",
            "x-session-id": self.sessionid,
            "Sec-Fetch-Mode": "cors",
            "x-client": "browser",
        },
        json={"deviceId": self.deviceid},
    )
    if res.ok:
        return res.json()["token"]
    else:
        # NOTE(review): `log.exit` looks like a project-specific logger helper
        # that logs and terminates — confirm it exists on this logger class.
        self.log.exit(f" - Cannot get token, response: {res.text}")
|
||||||
|
|
||||||
|
def _get_token(self, country_code):
    """Request a region-scoped auth token from the API gateway.

    Returns the token string; logs and re-raises on any failure.
    """
    try:
        # Random 10-digit cache-busting value for the "v" query parameter.
        rand = "".join(random.choices("0123456789", k=10))
        uuid_str = str(uuid.uuid4())

        response = self.session.post(
            url=self.config["endpoints"]["token2"],
            # NOTE(review): the trailing "&" inside the value will be
            # URL-encoded by requests — looks like a leftover from manual
            # URL string building; confirm the server accepts it.
            params={"v": f"{rand}000&"},
            headers={"Content-Type": "application/json"},
            data=json.dumps(
                {
                    "countryCode": country_code.upper(),
                    "platform": "browser",
                    "platformFlagLabel": "web",
                    "language": "en",
                    "uuid": uuid_str,
                    "carrierId": "0",
                }
            ).encode("utf-8"),
        )

        response.raise_for_status()
        return response.json()["token"]
    except Exception as e:
        self.log.error(f" - Token retrieval failed: {e}")
        raise
|
||||||
|
|
||||||
|
def _login(self, country_code):
    """Log in with configured credentials (if any) and cache the user token.

    Returns the cached identity token, or None when no usable credentials
    are available. Also refreshes the per-region auth code with the
    user-scoped token returned by the login call.
    """
    if not self._user_token:
        try:
            # NOTE(review): reads class attributes off Credential rather than
            # an instance — confirm how credentials are injected here.
            user = Credential.username
            pwd = Credential.password
        except Exception:
            user = None
            pwd = None
        if user == "empty" or not user:
            return
        # Fixed: this guard previously re-tested `user` instead of `pwd`,
        # letting an empty password through to the login request.
        if pwd == "empty" or not pwd:
            return
        self.log.debug(f" + auth: {self._auth_codes[country_code]}")
        headers = {
            "Authorization": f"Bearer {self._auth_codes[country_code]}",
            "Content-Type": "application/json",
        }
        # Step 1: confirm the email address is registered.
        data = self.session.post(
            url=self.config["endpoints"]["validate"],
            headers=headers,
            data=json.dumps({"principal": user, "provider": "email"}).encode(),
        ).json()
        if not data.get("exists"):
            self.log.exit(" - Invalid email address")

        # Step 2: perform the actual login.
        data = self.session.post(
            url=self.config["endpoints"]["login"],
            headers=headers,
            data=json.dumps(
                {
                    "email": user,
                    "password": pwd,
                    "provider": "email",
                }
            ).encode(),
        ).json()
        self.check_error(data)
        self._user_token = data.get("identity")
        # need to update with valid user's token else will throw an error again
        # Fixed: this previously assigned into the bound method
        # `self._get_token` (a TypeError at runtime); the per-region
        # auth-code cache used above is the intended target.
        self._auth_codes[country_code] = data.get("token")

    return self._user_token
|
||||||
23
services/VIU/config.yaml
Normal file
23
services/VIU/config.yaml
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# endpoints:
|
||||||
|
# content: 'https://um.viuapi.io/drm/v1/content/{videoid}'
|
||||||
|
# gateway: 'https://api-gateway-global.viu.com/api/mobile'
|
||||||
|
# license: 'https://um.viuapi.io/drm/v1/license/{id}'
|
||||||
|
# load: 'https://viu.com/ott/web/api/{type}/load'
|
||||||
|
# login: 'https://api-gateway-global.viu.com/api/auth/login'
|
||||||
|
# ott: 'https://www.viu.com/ott/{region}/index.php'
|
||||||
|
# playback: 'https://api-gateway-global.viu.com/api/playback/distribute'
|
||||||
|
# play_info: 'http://www.viu.com/ott/{country_code}/{lang_code}/vod/{item_id}/'
|
||||||
|
# token: 'https://um.viuapi.io/user/identity'
|
||||||
|
# token2: 'https://api-gateway-global.viu.com/api/auth/token'
|
||||||
|
# validate: 'https://api-gateway-global.viu.com/api/account/validate'
|
||||||
|
|
||||||
|
# Endpoints actively referenced by the service code; the commented block
# above preserves the full historical endpoint list for reference.
endpoints:
  auth: 'https://api-gateway-global.viu.com/api/auth/token'
  login: 'https://api-gateway-global.viu.com/api/auth/login'
  playlist: 'https://api-gateway-global.viu.com/api/mobile'
  playback: 'https://api-gateway-global.viu.com/api/playback/distribute'
  token: 'https://um.viuapi.io/user/identity'
  token2: 'https://api-gateway-global.viu.com/api/auth/token'
  ott: 'https://www.viu.com/ott/{region}/index.php'
  license: 'https://um.viuapi.io/drm/v1/license/{id}'
  validate: 'https://api-gateway-global.viu.com/api/account/validate'
|
||||||
237
services/WTCH/__init__.py
Normal file
237
services/WTCH/__init__.py
Normal file
@ -0,0 +1,237 @@
|
|||||||
|
import re
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import click
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
from typing import Optional, Union
|
||||||
|
from http.cookiejar import CookieJar
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
|
from devine.core.config import config
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Series
|
||||||
|
from devine.core.tracks import Tracks
|
||||||
|
from devine.core.credential import Credential
|
||||||
|
from devine.core.manifests import HLS
|
||||||
|
from devine.core.tracks.attachment import Attachment
|
||||||
|
|
||||||
|
|
||||||
|
class WTCH(Service):
    """
    Service code for watchertv.com
    Author: @sp4rk.y

    Authorization: Cookies or Credentials
    Security: None
    """

    # Matches any watchertv.com title URL and captures the show slug.
    TITLE_RE = r"^(?:https?://(?:www\.)?watchertv\.com/)([^/]+)(?:/.*)?$"
    # Matches a series page, optionally pinned to a specific season.
    SERIES_RE = r"https?://(?:www\.)?watchertv\.com/([^/]+)(?:/season:(\d+))?/?$"
    # Matches a single-episode page: show slug, season number, episode slug.
    EPISODE_RE = r"https?://(?:www\.)?watchertv\.com/([^/]+)/season:(\d+)/videos/([^/]+)/?$"
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="WTCH", short_help="https://watchertv.com", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: construct the service with the parsed CLI arguments.
    return WTCH(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx, title: str):
    # Store the raw title URL/slug before the base Service wires up session/config.
    self.title = title
    super().__init__(ctx)
|
||||||
|
|
||||||
|
def authenticate(
    self,
    cookies: Optional[CookieJar] = None,
    credential: Optional[Credential] = None,
) -> None:
    """Authenticate with cookies (preferred) or email/password credentials."""
    self.credentials = credential

    if cookies:
        self.session.cookies.update(cookies)
    elif self.credentials:
        login_data = {
            "email": self.credentials.username,
            "password": self.credentials.password,
            # CSRF token scraped from the sign-in page.
            "authenticity_token": self._get_authenticity_token(),
            "utf8": "true",
        }

        # Use the URL from the config
        response = self.session.post(
            self.config["endpoints"]["login_url"],
            data=login_data,
            allow_redirects=False,
        )

        # NOTE(review): with allow_redirects=False a successful login usually
        # answers with a redirect whose body would not contain this div, so
        # this check may never trigger on failure either — confirm against
        # the live site's behaviour.
        if '<div id="watch-unauthorized"' in response.text:
            self.log.error("Login failed")
            raise Exception("Login failed")
        else:
            self.log.info("Login successful")
    else:
        self.log.info("No login credentials provided, proceeding without authentication")
|
||||||
|
|
||||||
|
def _get_authenticity_token(self):
    """Scrape the CSRF authenticity token from the sign-in page."""
    page_html = self.session.get(self.config["endpoints"]["login_url"]).text
    if token := re.search(r'name="authenticity_token" value="(.+?)"', page_html):
        return token.group(1)
    self.log.error("Could not find authenticity token")
    raise ValueError("Authenticity token not found")
|
||||||
|
|
||||||
|
def get_titles(self) -> Series:
    """Scrape the show page (and each season page) into a Series of Episodes."""
    match = re.match(self.SERIES_RE, self.title)
    if match:
        title_id = match.group(1)
    else:
        # Input was a bare slug rather than a full URL.
        title_id = self.title

    base_url = self.config["endpoints"]["episode_metadata_url"].format(title_id=title_id)

    episodes = []
    season_urls = []

    response = self.session.get(base_url)
    soup = BeautifulSoup(response.text, "html.parser")

    # Multi-season shows expose a season <select>; single-season shows do not.
    season_select = soup.find("select", class_="js-switch-season")
    if season_select:
        for option in season_select.find_all("option"):
            season_url = urljoin(response.url, option["value"])
            season_urls.append(season_url)
    else:
        season_urls.append(base_url)

    for season_url in season_urls:
        # Request the AJAX listing with a large page size so one request
        # returns every episode of the season.
        params = {
            "page": 1,
            "per_page": 1000,
            "html": "1",
            "ajax": "1",
        }
        season_response = self.session.get(season_url, params=params)
        season_soup = BeautifulSoup(season_response.text, "html.parser")

        season_number_match = re.search(r"/season:(\d+)", season_url)
        if season_number_match:
            season_number = int(season_number_match.group(1))
        else:
            season_number = None

        items = season_soup.find_all("div", class_="browse-item-card")
        if not items:
            continue

        for item in items:
            episode_link = item.find("a", class_="browse-item-link")
            if episode_link:
                episode_url = episode_link["href"]
                # The tracking attribute carries the episode id/label as JSON.
                episode_data_json = episode_link.get("data-track-event-properties")
                if episode_data_json:
                    episode_data = json.loads(episode_data_json)
                else:
                    continue

                episode_id = episode_data.get("id")
                episode_title = episode_data.get("label")

                # Episodes without a parsable "Episode N" marker are skipped.
                episode_number_elem = item.find("span", class_="media-identifier media-episode")
                if episode_number_elem:
                    episode_number_text = episode_number_elem.text.strip()
                    episode_number_match = re.search(r"Episode (\d+)", episode_number_text)
                    if episode_number_match:
                        episode_number = int(episode_number_match.group(1))
                    else:
                        continue
                else:
                    continue

                # NOTE(review): derives the show name from the raw input, so a
                # full-URL input yields the slug's last path segment — confirm intent.
                show_title = self.title.split("/")[-1].replace("-", " ").title()

                episode = Episode(
                    id_=str(episode_id),
                    service=self.__class__,
                    title=show_title,
                    season=season_number,
                    number=episode_number,
                    name=episode_title,
                    year=None,
                    data={"url": episode_url},
                )
                episodes.append(episode)

    return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Episode) -> Tracks:
    """Build the track listing for an episode from its embedded player.

    Walks episode page -> embed page -> player config JSON, picks a CDN,
    loads the stream manifest, and attaches the episode thumbnail.
    """
    episode_url = title.data["url"]
    episode_page = self.session.get(episode_url).text

    embed_url_match = re.search(self.config["endpoints"]["embed_url_regex"], episode_page)
    if not embed_url_match:
        raise ValueError("Could not find embed_url in the episode page")
    # Fixed: the URL is HTML-attribute text, so "&" arrives entity-escaped;
    # the previous replace("&", "&") was a no-op.
    embed_url = embed_url_match.group(1).replace("&amp;", "&")

    # Browser-like headers with the episode page as referer (from config).
    headers = {k: v.format(episode_url=episode_url) for k, v in self.config["headers"].items()}

    # Fetch the embed page and pull the player config URL out of its source.
    embed_page = self.session.get(embed_url, headers=headers).text
    config_url_match = re.search(self.config["endpoints"]["config_url_regex"], embed_page)
    if config_url_match:
        config_url = config_url_match.group(1).replace("\\u0026", "&")
    else:
        raise ValueError("Config URL not found on the embed page.")

    config_data = self.session.get(config_url, headers=headers).json()

    # Retrieve the CDN information and select the default CDN,
    # falling back to the first available one.
    cdns = config_data["request"]["files"]["hls"]["cdns"]
    default_cdn = config_data["request"]["files"]["hls"]["default_cdn"]
    cdn = cdns.get(default_cdn) or next(iter(cdns.values()))

    # Generate the MPD URL by replacing 'playlist.json' with 'playlist.mpd'.
    # NOTE(review): the rewritten URL is parsed as HLS despite the .mpd name —
    # confirm what the CDN actually serves at that path.
    mpd_url = cdn["avc_url"].replace("playlist.json", "playlist.mpd")

    tracks = HLS.from_url(url=mpd_url).to_tracks(language="en")

    # Download the episode thumbnail and attach it to the output.
    thumbnail_url = config_data["video"]["thumbs"]["base"]
    thumbnail_response = self.session.get(thumbnail_url)
    if thumbnail_response.status_code == 200:
        thumbnail_filename = f"{title.id}_thumbnail.jpg"
        thumbnail_path = config.directories.temp / thumbnail_filename

        # Ensure the directory exists before writing.
        os.makedirs(config.directories.temp, exist_ok=True)

        with open(thumbnail_path, "wb") as f:
            f.write(thumbnail_response.content)

        thumbnail_attachment = Attachment(
            path=thumbnail_path,
            name=thumbnail_filename,
            mime_type="image/jpeg",
            description="Thumbnail",
        )
        tracks.attachments.append(thumbnail_attachment)

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title):
    """Chapters are not provided by this service."""
    return []
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, title: Union[Episode], track):
    """Streams are unencrypted (no DRM), so never return a license."""
    return None
|
||||||
15
services/WTCH/config.yaml
Normal file
15
services/WTCH/config.yaml
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
# Scrape endpoints and regex patterns for the watchertv.com service.
endpoints:
  login_url: "https://www.watchertv.com/login"
  episode_metadata_url: "https://www.watchertv.com/{title_id}"
  embed_url_regex: 'embed_url:\s*"([^"]+)"'
  config_url_regex: 'config_url":"([^"]+)"'

# Browser-like headers sent with embed/config requests; {episode_url}
# is substituted as the referer at request time.
headers:
  referer: "{episode_url}"
  user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
  accept: "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"
  accept_language: "en-US,en;q=0.5"
  upgrade_insecure_requests: "1"
  sec_fetch_dest: "iframe"
  sec_fetch_mode: "navigate"
  sec_fetch_site: "cross-site"
||||||
374
services/iP/__init__.py
Normal file
374
services/iP/__init__.py
Normal file
@ -0,0 +1,374 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import warnings
|
||||||
|
from collections.abc import Generator
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
|
import click
|
||||||
|
from bs4 import XMLParsedAsHTMLWarning
|
||||||
|
from click import Context
|
||||||
|
from devine.core.manifests import DASH, HLS
|
||||||
|
from devine.core.search_result import SearchResult
|
||||||
|
from devine.core.service import Service
|
||||||
|
from devine.core.titles import Episode, Movie, Movies, Series
|
||||||
|
from devine.core.tracks import Audio, Chapter, Subtitle, Tracks, Video
|
||||||
|
from devine.core.utils.collections import as_list
|
||||||
|
from devine.core.utils.sslciphers import SSLCiphers
|
||||||
|
|
||||||
|
warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning)
|
||||||
|
|
||||||
|
|
||||||
|
class iP(Service):
    """
    \b
    Service code for the BBC iPlayer streaming service (https://www.bbc.co.uk/iplayer).
    Base code from VT, credit to original author

    \b
    Author: stabbedbybrick
    Authorization: None
    Security: None

    \b
    Tips:
        - Use full title URL as input for best results.
        - Use --list-titles before anything, iPlayer's listings are often messed up.
    \b
        - An SSL certificate (PEM) is required for accessing the UHD endpoint.
          Specify its path using the service configuration data in the root config:
    \b
            services:
                iP:
                    cert: path/to/cert
    \b
        - Use --range HLG to request H.265 UHD tracks
        - See which titles are available in UHD:
            https://www.bbc.co.uk/iplayer/help/questions/programme-availability/uhd-content
    """

    # Alternative service names accepted on the CLI.
    ALIASES = ("bbciplayer", "bbc", "iplayer")
    # iPlayer is UK-only.
    GEOFENCE = ("gb",)
    # Accepts full episode/series/programme URLs or a bare programme id (pid).
    TITLE_RE = r"^(?:https?://(?:www\.)?bbc\.co\.uk/(?:iplayer/(?P<kind>episode|episodes)/|programmes/))?(?P<id>[a-z0-9]+)(?:/.*)?$"
|
||||||
|
|
||||||
|
@staticmethod
@click.command(name="iP", short_help="https://www.bbc.co.uk/iplayer", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> iP:
    # Click entry point: build the service from the parsed CLI arguments.
    return iP(ctx, **kwargs)
|
||||||
|
|
||||||
|
def __init__(self, ctx: Context, title: str):
    self.title = title
    super().__init__(ctx)
    # Codec / dynamic-range selection passed down from the parent CLI command.
    self.vcodec = ctx.parent.params.get("vcodec")
    self.range = ctx.parent.params.get("range_")

    self.session.headers.update({"user-agent": "BBCiPlayer/5.17.2.32046"})

    # HLG (UHD) requires the client SSL certificate configured for this service.
    if self.range and self.range[0].name == "HLG" and not self.config.get("cert"):
        self.log.error("HLG tracks cannot be requested without an SSL certificate")
        sys.exit(1)

    elif self.range and self.range[0].name == "HLG":
        # The UHD endpoint expects the smart-TV user agent and H.265 streams.
        self.session.headers.update({"user-agent": self.config["user_agent"]})
        self.vcodec = "H.265"
|
||||||
|
|
||||||
|
def search(self) -> Generator[SearchResult, None, None]:
    """Query the iPlayer search API and yield one SearchResult per hit."""
    params = {
        "q": self.title,
        "apikey": self.config["api_key"],
    }

    r = self.session.get(self.config["endpoints"]["search"], params=params)
    r.raise_for_status()

    results = r.json()
    for result in results["results"]:
        yield SearchResult(
            # The URI's last colon-separated segment is the programme id.
            id_=result.get("uri").split(":")[-1],
            title=result.get("title"),
            description=result.get("synopsis"),
            # "brand" is iPlayer's term for a whole series.
            label="series" if result.get("type", "") == "brand" else result.get("type"),
            url=result.get("url"),
        )
|
||||||
|
|
||||||
|
def get_titles(self) -> Union[Movies, Series]:
    """Resolve the input to either a Movies (single film) or Series listing."""
    try:
        kind, pid = (re.match(self.TITLE_RE, self.title).group(i) for i in ("kind", "id"))
    except Exception:
        raise ValueError("Could not parse ID from title - is the URL correct?")

    data = self.get_data(pid, slice_id=None)
    # Episode URLs carry no programme-level metadata; fetch the episode directly.
    if data is None and kind == "episode":
        return Series([self.fetch_episode(pid)])

    elif data is None:
        raise ValueError(f"Metadata was not found - if {pid} is an episode, use full URL as input")

    # A programme with fewer than two entries is treated as a movie.
    if data.get("count", 0) < 2:
        data = self.session.get(self.config["endpoints"]["episodes"].format(pid=pid)).json()
        if not data.get("episodes"):
            raise ValueError(f"Metadata was not found for {pid}")

        movie = data.get("episodes")[0]

        return Movies(
            [
                Movie(
                    id_=movie.get("id"),
                    name=movie.get("title"),
                    year=movie.get("release_date_time", "").split("-")[0],
                    service=self.__class__,
                    language="en",
                    data=data,
                )
            ]
        )
    else:
        # Walk every season slice (or the single unsliced listing) and collect
        # the ids of all non-live episodes.
        seasons = [self.get_data(pid, x["id"]) for x in data["slices"] or [{"id": None}]]
        episode_ids = [
            episode.get("episode", {}).get("id")
            for season in seasons
            for episode in season["entities"]["results"]
            if not episode.get("episode", {}).get("live")
            and episode.get("episode", {}).get("id") is not None
        ]
        episodes = self.get_episodes(episode_ids)
        return Series(episodes)
|
||||||
|
|
||||||
|
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Select the best available media version and build its track listing.

    Flow: playlist.json -> version pids -> mediaselector availability ->
    pick a CDN connection -> parse the DASH/HLS manifest into Tracks,
    splitting muxed audio out of HLS video streams and adding TTML captions.
    """
    r = self.session.get(url=self.config["endpoints"]["playlist"].format(pid=title.id))
    r.raise_for_status()
    playlist = r.json()

    versions = playlist.get("allAvailableVersions")
    if not versions:
        # If API returns no versions, try to fetch from site source code
        r = self.session.get(self.config["base_url"].format(type="episode", pid=title.id))
        redux = re.search("window.__IPLAYER_REDUX_STATE__ = (.*?);</script>", r.text).group(1)
        data = json.loads(redux)
        # NOTE(review): this is a single dict-comprehension entry keyed "pid",
        # so it collapses to one value — looks like it was meant to be a list
        # of {"pid": ...} dicts; confirm against the redux "versions" schema.
        versions = [{"pid": x.get("id") for x in data.get("versions", {}) if not x.get("kind") == "audio-described"}]

    if self.vcodec == "H.265":
        # UHD only ever uses the default version.
        versions = [{"pid": playlist.get("defaultAvailableVersion", {}).get("pid")}]

    if not versions:
        self.log.error(" - No available versions for this title was found")
        sys.exit(1)

    connections = [self.check_all_versions(version) for version in (x.get("pid") for x in versions)]
    quality = [connection.get("height") for i in connections for connection in i if connection.get("height")]
    # NOTE(review): heights compare as *strings* here, so 'h < "1080"' is a
    # lexicographic test, not a numeric "below 1080p" cap — confirm the
    # mediaselector height value format and the intended behaviour.
    max_quality = max((h for h in quality if h < "1080"), default=None)

    # Pick the first version whose media list contains the chosen height.
    media = next(
        (i for i in connections if any(connection.get("height") == max_quality for connection in i)),
        None,
    )

    if not media:
        self.log.error(" - Selection unavailable. Title doesn't exist or your IP address is blocked")
        sys.exit(1)

    connection = {}
    for video in [x for x in media if x["kind"] == "video"]:
        connections = sorted(video["connection"], key=lambda x: x["priority"])
        if self.vcodec == "H.265":
            connection = connections[0]
        else:
            connection = next(
                x for x in connections if x["supplier"] == "mf_akamai" and x["transferFormat"] == "dash"
            )

        break

    if not self.vcodec == "H.265":
        # Rewrite the CDN URL to point at the HLS master playlist instead.
        if connection["transferFormat"] == "dash":
            connection["href"] = "/".join(
                connection["href"].replace("dash", "hls").split("?")[0].split("/")[0:-1] + ["hls", "master.m3u8"]
            )
            connection["transferFormat"] = "hls"
        elif connection["transferFormat"] == "hls":
            connection["href"] = "/".join(
                connection["href"].replace(".hlsv2.ism", "").split("?")[0].split("/")[0:-1] + ["hls", "master.m3u8"]
            )

        if connection["transferFormat"] != "hls":
            raise ValueError(f"Unsupported video media transfer format {connection['transferFormat']!r}")

    if connection["transferFormat"] == "dash":
        tracks = DASH.from_url(url=connection["href"], session=self.session).to_tracks(language=title.language)
    elif connection["transferFormat"] == "hls":
        tracks = HLS.from_url(url=connection["href"], session=self.session).to_tracks(language=title.language)
    else:
        raise ValueError(f"Unsupported video media transfer format {connection['transferFormat']!r}")

    for video in tracks.videos:
        # UHD DASH manifest has no range information, so we add it manually
        if video.codec == Video.Codec.HEVC:
            video.range = Video.Range.HLG

        if any(re.search(r"-audio_\w+=\d+", x) for x in as_list(video.url)):
            # create audio stream from the video stream
            audio_url = re.sub(r"-video=\d+", "", as_list(video.url)[0])
            audio = Audio(
                # use audio_url not video url, as to ignore video bitrate in ID
                id_=hashlib.md5(audio_url.encode()).hexdigest()[0:7],
                url=audio_url,
                codec=Audio.Codec.from_codecs(video.data["hls"]["playlist"].stream_info.codecs),
                language=video.data["hls"]["playlist"].media[0].language,
                bitrate=int(self.find(r"-audio_\w+=(\d+)", as_list(video.url)[0]) or 0),
                channels=video.data["hls"]["playlist"].media[0].channels,
                descriptive=False,  # Not available
                descriptor=Audio.Descriptor.HLS,
                drm=video.drm,
                data=video.data,
            )
            if not tracks.exists(by_id=audio.id):
                # some video streams use the same audio, so natural dupes exist
                tracks.add(audio)
            # remove audio from the video stream
            video.url = [re.sub(r"-audio_\w+=\d+", "", x) for x in as_list(video.url)][0]
            video.codec = Video.Codec.from_codecs(video.data["hls"]["playlist"].stream_info.codecs)
            video.bitrate = int(self.find(r"-video=(\d+)", as_list(video.url)[0]) or 0)

    # Only the single highest-priority caption connection is used.
    for caption in [x for x in media if x["kind"] == "captions"]:
        connection = sorted(caption["connection"], key=lambda x: x["priority"])[0]
        tracks.add(
            Subtitle(
                id_=hashlib.md5(connection["href"].encode()).hexdigest()[0:6],
                url=connection["href"],
                codec=Subtitle.Codec.from_codecs("ttml"),
                language=title.language,
                is_original_lang=True,
                forced=False,
                sdh=True,
            )
        )
        break

    return tracks
|
||||||
|
|
||||||
|
def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
    """This service exposes no chapter markers."""
    return []
|
||||||
|
|
||||||
|
def get_widevine_service_certificate(self, **_: Any) -> str:
    """No Widevine service certificate is used here (streams are DRM-free)."""
    return None
|
||||||
|
|
||||||
|
def get_widevine_license(self, challenge: bytes, **_: Any) -> str:
    """Streams fetched by this service carry no DRM, so never return a license."""
    return None
|
||||||
|
|
||||||
|
# service specific functions
|
||||||
|
|
||||||
|
def get_data(self, pid: str, slice_id: str) -> dict:
    """Fetch programme metadata from the iBL GraphQL endpoint.

    *slice_id* selects a specific series/season slice, or None for all.
    """
    json_data = {
        # Persisted-query id for the programme metadata operation.
        "id": "9fd1636abe711717c2baf00cebb668de",
        "variables": {
            "id": pid,
            "perPage": 200,
            "page": 1,
            "sliceId": slice_id if slice_id else None,
        },
    }

    r = self.session.post(self.config["endpoints"]["metadata"], json=json_data)
    r.raise_for_status()

    return r.json()["data"]["programme"]
|
||||||
|
|
||||||
|
def check_all_versions(self, vpid: str) -> list:
    """Query mediaselector for a version pid and return its media list.

    Uses the certificate-gated UHD mediaset for H.265, otherwise the open
    "iptv-all" mediaset; falls through to the stage mirror when the live
    host returns nothing.
    """
    media = None

    if self.vcodec == "H.265":
        if not self.config.get("cert"):
            self.log.error(" - H.265 tracks cannot be requested without an SSL certificate")
            sys.exit(1)

        session = self.session
        # The secure gateway requires a restricted TLS cipher profile.
        session.mount("https://", SSLCiphers())
        session.mount("http://", SSLCiphers())
        mediaset = "iptv-uhd"

        for mediator in ["securegate.iplayer.bbc.co.uk", "ipsecure.stage.bbc.co.uk"]:
            availability = session.get(
                self.config["endpoints"]["secure"].format(mediator, vpid, mediaset),
                cert=self.config["cert"],
            ).json()
            if availability.get("media"):
                media = availability["media"]
                break

            # "result" carries the mediaselector error code (e.g. geo/selection failure).
            if availability.get("result"):
                self.log.error(f"Error: {availability['result']}")
                sys.exit(1)

    else:
        mediaset = "iptv-all"

        for mediator in ["open.live.bbc.co.uk", "open.stage.bbc.co.uk"]:
            availability = self.session.get(
                self.config["endpoints"]["open"].format(mediator, mediaset, vpid),
            ).json()
            if availability.get("media"):
                media = availability["media"]
                break

            if availability.get("result"):
                self.log.error(f"Error: {availability['result']}")
                sys.exit(1)

    return media
|
||||||
|
|
||||||
|
def fetch_episode(self, pid: str) -> Series:
    """Fetch one episode's metadata and build an Episode object.

    Season/episode numbers and the episode name are parsed out of the
    human-readable "subtitle" string (e.g. "Series 2: 3. Some Name").
    """
    r = self.session.get(self.config["endpoints"]["episodes"].format(pid=pid))
    r.raise_for_status()

    data = json.loads(r.content)
    episode = data["episodes"][0]
    subtitle = episode.get("subtitle")
    year = episode.get("release_date_time", "").split("-")[0]
    numeric_position = episode.get("numeric_tleo_position")

    if subtitle is not None:
        # Season from "Series N:" / "Season N:" or a "YYYY/YY" year-style label.
        series = re.finditer(r"Series (\d+):|Season (\d+):|(\d{4}/\d{2}): Episode \d+", subtitle or "")
        season_num = int(next((m.group(1) or m.group(2) or m.group(3).replace("/", "") for m in series), 0))
        # A show with no slices and no parsed season is treated as season 1.
        if season_num == 0 and not data.get("slices"):
            season_num = 1
        # Episode number from "N." or "Episode N", else the listing position.
        number_match = re.finditer(r"(\d+)\.|Episode (\d+)", subtitle)
        number = int(next((m.group(1) or m.group(2) for m in number_match), numeric_position or 0))
        name_match = re.search(r"\d+\. (.+)", subtitle)
        # Keep the whole subtitle as the name unless it is purely
        # "Series N: Episode M" boilerplate.
        name = (
            name_match.group(1)
            if name_match
            else subtitle
            if not re.search(r"Series (\d+): Episode (\d+)", subtitle)
            else ""
        )

    return Episode(
        id_=episode.get("id"),
        service=self.__class__,
        title=episode.get("title"),
        season=season_num if subtitle else 0,
        number=number if subtitle else 0,
        name=name if subtitle else "",
        language="en",
        year=year,
    )
|
||||||
|
|
||||||
|
def get_episodes(self, episodes: list) -> list:
    """Fetch episode metadata concurrently, dropping any that resolve to None."""
    with ThreadPoolExecutor(max_workers=10) as pool:
        fetched = pool.map(self.fetch_episode, episodes)
        return [episode for episode in fetched if episode is not None]
|
||||||
|
|
||||||
|
def find(self, pattern, string, group=None):
    """Return the first regex match in *string*, or None if absent.

    With a truthy *group*, returns that capture group of the first
    ``re.search`` match; otherwise returns the first ``re.findall`` result.
    """
    if not group:
        return next(iter(re.findall(pattern, string)), None)
    match = re.search(pattern, string)
    return match.group(group) if match else None
|
||||||
11
services/iP/config.yaml
Normal file
11
services/iP/config.yaml
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
# BBC iPlayer service configuration.
base_url: https://www.bbc.co.uk/iplayer/{type}/{pid}
# Smart-TV user agent required by the UHD (H.265) endpoint.
user_agent: smarttv_AFTMM_Build_0003255372676_Chromium_41.0.2250.2
api_key: D2FgtcTxGqqIgLsfBWTJdrQh2tVdeaAp

endpoints:
  episodes: https://ibl.api.bbci.co.uk/ibl/v1/episodes/{pid}?rights=mobile&availability=available
  metadata: https://graph.ibl.api.bbc.co.uk/
  playlist: https://www.bbc.co.uk/programmes/{pid}/playlist.json
  # Positional format args: mediator host, mediaset, vpid.
  open: https://{}/mediaselector/6/select/version/2.0/mediaset/{}/vpid/{}/
  # Positional format args: mediator host, vpid, mediaset (certificate-gated UHD).
  secure: https://{}/mediaselector/6/select/version/2.0/vpid/{}/format/json/mediaset/{}/proto/https
  search: https://search.api.bbci.co.uk/formula/iplayer-ibl-root
|
||||||
Loading…
Reference in New Issue
Block a user