Update: cache keys locally, and redownload them once daily.

Use --force-update-keys to redownload new keys from sources.
This commit is contained in:
SuperUserek 2026-03-05 11:19:39 +00:00
parent 6091d3257f
commit 164ed468e6

View File

@ -1,19 +1,14 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
""" """
Auto-decrypt helper for LG-style blobs. SuperUserek AutoKeybox Decoder (Auto AES Decrypt Tool)
Key sources parsed (by default): What it does:
1) openlgtv/epk2extract AES.key - Downloads + parses AES keys from multiple sources (default list below)
https://raw.githubusercontent.com/openlgtv/epk2extract/refs/heads/master/keys/AES.key - Caches the merged, deduped key list to a local "keys.txt"
- Only updates the cache once per day (unless --force-update-keys)
2) MyDRMTools KnownKeys.txt (RAW) - Optionally includes/only-uses a custom user-provided AES key
https://git.drmlab.io/SuperUserek/MyDRMTools/raw/branch/main/AutoKeyboxDecoder/KnownKeys.txt - Tries AES modes (ECB/CBC/CFB/OFB/CTR) + IV strategies
- Validates by searching for b"INNER_MSTAR"
Features:
- Multiple key sources (repeat --keys-url, or use defaults)
- --key <hex> custom key (AES-128/192/256 only)
- --only-custom to skip downloading lists
- Tries AES modes (ECB/CBC/CFB/OFB/CTR) + IV strategies, validates by b"INNER_MSTAR"
- Extracts payload using CHAI/kbox logic and saves output - Extracts payload using CHAI/kbox logic and saves output
Requires: Requires:
@ -24,6 +19,7 @@ import argparse
import os import os
import re import re
from dataclasses import dataclass from dataclasses import dataclass
from datetime import date
from typing import Dict, List, Optional, Tuple from typing import Dict, List, Optional, Tuple
import requests import requests
@ -33,7 +29,7 @@ from Crypto.Util import Counter
DEFAULT_KEYS_URLS = [ DEFAULT_KEYS_URLS = [
"https://raw.githubusercontent.com/openlgtv/epk2extract/refs/heads/master/keys/AES.key", "https://raw.githubusercontent.com/openlgtv/epk2extract/refs/heads/master/keys/AES.key",
# IMPORTANT: use RAW endpoint, not /src/branch/ (HTML) # IMPORTANT: RAW endpoint (not /src/branch/ which is HTML)
"https://git.drmlab.io/SuperUserek/MyDRMTools/raw/branch/main/AutoKeyboxDecoder/KnownKeys.txt", "https://git.drmlab.io/SuperUserek/MyDRMTools/raw/branch/main/AutoKeyboxDecoder/KnownKeys.txt",
] ]
@ -57,7 +53,7 @@ def parse_keys(text: str) -> List[str]:
Works for: Works for:
- AES.key style lines with comments - AES.key style lines with comments
- KnownKeys.txt where keys can be 1-per-line or space-separated on one line - KnownKeys.txt where keys can be 1-per-line or space-separated
It finds ALL 32/48/64-hex tokens in each (comment-stripped) line. It finds ALL 32/48/64-hex tokens in each (comment-stripped) line.
""" """
@ -70,25 +66,13 @@ def parse_keys(text: str) -> List[str]:
# strip inline comments # strip inline comments
line = line.split("#", 1)[0] line = line.split("#", 1)[0]
# find every key token on the line (handles space-separated single-line lists) # find every key token on the line
for m in HEX_KEY_RE.findall(line): for m in HEX_KEY_RE.findall(line):
keys.append(m.upper()) keys.append(m.upper())
return keys return keys
def group_keys_by_aes_size(keys_hex: List[str]) -> Dict[int, List[str]]:
    """Bucket hex key strings by decoded AES key length (16/24/32 bytes).

    Tokens that are not valid hex, or that decode to a byte length other
    than a legal AES key size, are silently dropped.
    """
    grouped: Dict[int, List[str]] = {16: [], 24: [], 32: []}
    for khex in keys_hex:
        try:
            kb = bytes.fromhex(khex)
        except ValueError:
            # Not valid hex -- skip silently.
            continue
        if len(kb) in grouped:
            grouped[len(kb)].append(khex.upper())
    return grouped
def normalize_custom_key(key_hex: str) -> str: def normalize_custom_key(key_hex: str) -> str:
k = key_hex.strip() k = key_hex.strip()
if k.startswith(("0x", "0X")): if k.startswith(("0x", "0X")):
@ -104,7 +88,29 @@ def normalize_custom_key(key_hex: str) -> str:
return k.upper() return k.upper()
def group_keys_by_aes_size(keys_hex: List[str]) -> Dict[int, List[str]]:
    """Partition hex-encoded keys into buckets keyed by AES key byte-length.

    Keys that fail hex decoding, or whose decoded length is not 16, 24,
    or 32 bytes, are ignored.
    """
    buckets: Dict[int, List[str]] = {size: [] for size in (16, 24, 32)}
    for hex_key in keys_hex:
        try:
            raw = bytes.fromhex(hex_key)
        except ValueError:
            continue  # not hex at all
        bucket = buckets.get(len(raw))
        if bucket is not None:
            bucket.append(hex_key.upper())
    return buckets
def _ensure_block_multiple(data: bytes) -> bytes:
# Keep your original behavior: pad ciphertext to 16 then decrypt for block modes
if len(data) % 16 == 0:
return data
return pad(data, 16)
def extract_payload(dec_data: bytes) -> bytes: def extract_payload(dec_data: bytes) -> bytes:
"""
Your payload extraction logic (unchanged).
"""
payload = None payload = None
for offset in (64, 96): for offset in (64, 96):
candidate = dec_data[offset:] candidate = dec_data[offset:]
@ -133,13 +139,6 @@ class AttemptResult:
plaintext: bytes plaintext: bytes
def _ensure_block_multiple(data: bytes) -> bytes:
    # Keep your original behavior: pad ciphertext to 16 then decrypt for block modes
    # Data already aligned to the 16-byte AES block size passes through
    # unchanged; otherwise it is padded (PKCS#7 via Crypto's pad helper).
    if len(data) % 16 == 0:
        return data
    return pad(data, 16)
def try_decrypt_with_key(encrypted: bytes, key_hex: str) -> List[AttemptResult]: def try_decrypt_with_key(encrypted: bytes, key_hex: str) -> List[AttemptResult]:
try: try:
key = bytes.fromhex(key_hex) key = bytes.fromhex(key_hex)
@ -160,13 +159,10 @@ def try_decrypt_with_key(encrypted: bytes, key_hex: str) -> List[AttemptResult]:
# --- IV candidates for CBC/CFB/OFB --- # --- IV candidates for CBC/CFB/OFB ---
iv_candidates: List[Tuple[str, bytes, bytes]] = [] iv_candidates: List[Tuple[str, bytes, bytes]] = []
iv_zero = b"\x00" * 16 iv_candidates.append(("IV_ZERO", b"\x00" * 16, encrypted))
iv_candidates.append(("IV_ZERO", iv_zero, encrypted))
if len(encrypted) >= 16: if len(encrypted) >= 16:
iv_from_prefix = encrypted[:16] iv_candidates.append(("IV_PREFIX16", encrypted[:16], encrypted[16:]))
ct_after_prefix = encrypted[16:]
iv_candidates.append(("IV_PREFIX16", iv_from_prefix, ct_after_prefix))
# --- CBC --- # --- CBC ---
for iv_label, iv, ct in iv_candidates: for iv_label, iv, ct in iv_candidates:
@ -232,27 +228,99 @@ def try_decrypt_with_key(encrypted: bytes, key_hex: str) -> List[AttemptResult]:
return successes return successes
# -------------------------
# Key cache: keys.txt daily
# -------------------------
def is_cache_fresh(cache_path: str) -> bool:
    """Report whether the cache file was last modified 'today' (local date).

    A missing or unreadable file counts as stale (FileNotFoundError is a
    subclass of OSError, so the EAFP probe covers the non-existent case).
    """
    try:
        modified_at = os.path.getmtime(cache_path)
    except OSError:
        return False
    return date.fromtimestamp(modified_at) == date.today()
def load_keys_from_cache(cache_path: str) -> List[str]:
    """Read hex keys from the keys.txt cache.

    Accepts one key per line (multiple tokens per line also work);
    blank lines and '#' comment lines are ignored. Returns [] when the
    cache file does not exist.
    """
    if not os.path.exists(cache_path):
        return []
    collected: List[str] = []
    with open(cache_path, "r", encoding="utf-8", errors="ignore") as fh:
        for raw in fh:
            stripped = raw.strip()
            if not stripped or stripped.startswith("#"):
                continue
            collected.extend(tok.upper() for tok in HEX_KEY_RE.findall(stripped))
    return collected
def save_keys_to_cache(cache_path: str, keys: List[str], sources: List[str]) -> None:
    """Write the merged key list to cache_path with a dated comment header.

    The header records today's date and the source URLs the keys came
    from; one key is written per line after a blank separator line.
    """
    target_dir = os.path.dirname(os.path.abspath(cache_path)) or "."
    os.makedirs(target_dir, exist_ok=True)
    header = [
        "# SuperUserek AutoKeybox Decoder - Cached AES keys",
        f"# Updated: {date.today().isoformat()}",
        "# Sources:",
    ]
    header.extend(f"# - {src}" for src in sources)
    with open(cache_path, "w", encoding="utf-8") as fh:
        fh.write("\n".join(header))
        fh.write("\n\n")
        fh.writelines(key + "\n" for key in keys)
def build_key_list( def build_key_list(
keys_urls: List[str], keys_urls: List[str],
custom_key: Optional[str], custom_key: Optional[str],
only_custom: bool, only_custom: bool,
) -> List[str]: cache_path: str,
force_update_keys: bool,
) -> Tuple[List[str], bool]:
"""
Returns (keys, used_cache)
"""
keys: List[str] = [] keys: List[str] = []
# custom key first # custom key first (if any)
if custom_key: if custom_key:
keys.append(normalize_custom_key(custom_key)) keys.append(normalize_custom_key(custom_key))
if only_custom: if only_custom:
return keys return keys, False
# If cache is fresh and not forced, use it
if (not force_update_keys) and is_cache_fresh(cache_path):
cached = load_keys_from_cache(cache_path)
# Dedup but keep custom key first if present
seen = set(keys)
for k in cached:
if k not in seen:
# ensure valid AES size
try:
kb = bytes.fromhex(k)
except ValueError:
continue
if len(kb) not in (16, 24, 32):
continue
keys.append(k)
seen.add(k)
return keys, True
# Otherwise download + parse, then write cache
seen = set(keys) seen = set(keys)
downloaded: List[str] = []
for url in keys_urls: for url in keys_urls:
text = download_text(url) text = download_text(url)
parsed = parse_keys(text) for k in parse_keys(text):
# Dedup + keep AES-valid sizes only
for k in parsed:
try: try:
kb = bytes.fromhex(k) kb = bytes.fromhex(k)
except ValueError: except ValueError:
@ -260,10 +328,15 @@ def build_key_list(
if len(kb) not in (16, 24, 32): if len(kb) not in (16, 24, 32):
continue continue
if k not in seen: if k not in seen:
keys.append(k) downloaded.append(k)
seen.add(k) seen.add(k)
return keys # Save merged list (custom key not included in cache; cache is for known keys)
save_keys_to_cache(cache_path, downloaded, keys_urls)
# final keys list = custom (if any) + downloaded
keys.extend(downloaded)
return keys, False
def auto_decrypt( def auto_decrypt(
@ -273,17 +346,28 @@ def auto_decrypt(
stop_on_first: bool, stop_on_first: bool,
custom_key: Optional[str], custom_key: Optional[str],
only_custom: bool, only_custom: bool,
cache_path: str,
force_update_keys: bool,
) -> int: ) -> int:
with open(input_file, "rb") as f: with open(input_file, "rb") as f:
encrypted_data = f.read() encrypted_data = f.read()
all_keys = build_key_list(keys_urls, custom_key, only_custom) all_keys, used_cache = build_key_list(
keys_urls=keys_urls,
custom_key=custom_key,
only_custom=only_custom,
cache_path=cache_path,
force_update_keys=force_update_keys,
)
if not all_keys: if not all_keys:
print("[-] No keys to try. Provide --key or disable --only-custom.") print("[-] No keys to try. Provide --key or disable --only-custom.")
return 3 return 3
grouped = group_keys_by_aes_size(all_keys) grouped = group_keys_by_aes_size(all_keys)
print("[*] Keys ready:") cache_msg = f"(cache: {os.path.abspath(cache_path)}"
cache_msg += ", used cached keys)" if used_cache else ", updated/downloaded keys)"
print(f"[*] Keys ready {cache_msg}")
print(f" AES-128: {len(grouped[16])}") print(f" AES-128: {len(grouped[16])}")
print(f" AES-192: {len(grouped[24])}") print(f" AES-192: {len(grouped[24])}")
print(f" AES-256: {len(grouped[32])}") print(f" AES-256: {len(grouped[32])}")
@ -333,7 +417,7 @@ def auto_decrypt(
def main(): def main():
ap = argparse.ArgumentParser( ap = argparse.ArgumentParser(
description="Try AES keys (multiple remote lists + optional custom key) across AES modes to decrypt an input file." description="Try AES keys (cached daily) across AES modes to decrypt an input file."
) )
ap.add_argument("file", help="Path to encrypted input file (e.g. file.dat)") ap.add_argument("file", help="Path to encrypted input file (e.g. file.dat)")
@ -344,6 +428,18 @@ def main():
help="Add a key list URL to download+parse (repeatable). If omitted, uses built-in defaults.", help="Add a key list URL to download+parse (repeatable). If omitted, uses built-in defaults.",
) )
ap.add_argument(
"--keys-cache",
default="keys.txt",
help='Path to local keys cache file (default: "keys.txt")',
)
ap.add_argument(
"--force-update-keys",
action="store_true",
help="Force re-download + rebuild keys cache (otherwise updates only once per day).",
)
ap.add_argument("--outdir", default=None, help="Output directory (default: alongside input file)") ap.add_argument("--outdir", default=None, help="Output directory (default: alongside input file)")
ap.add_argument( ap.add_argument(
@ -354,7 +450,7 @@ def main():
ap.add_argument( ap.add_argument(
"--only-custom", "--only-custom",
action="store_true", action="store_true",
help="Only try the custom key (skip downloading remote key lists).", help="Only try the custom key (skip downloading key lists / cache).",
) )
ap.add_argument( ap.add_argument(
@ -374,6 +470,8 @@ def main():
stop_on_first=not args.all_matches, stop_on_first=not args.all_matches,
custom_key=args.key, custom_key=args.key,
only_custom=args.only_custom, only_custom=args.only_custom,
cache_path=args.keys_cache,
force_update_keys=args.force_update_keys,
) )
except requests.RequestException as e: except requests.RequestException as e:
print(f"[-] Failed to download a keys list: {e}") print(f"[-] Failed to download a keys list: {e}")