Changes
Added a custom m3u8 parser for DSNP. Hybrid DV+HDR is still to be tested. A few miscellaneous fixes.
parent: 4a773d1db0
commit: a9207d263d
85
poetry.lock
generated
@@ -44,6 +44,62 @@ files = [
|
||||
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "backports-datetime-fromisoformat"
|
||||
version = "2.0.3"
|
||||
description = "Backport of Python 3.11's datetime.fromisoformat"
|
||||
optional = false
|
||||
python-versions = ">3"
|
||||
files = [
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f681f638f10588fa3c101ee9ae2b63d3734713202ddfcfb6ec6cea0778a29d4"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:cd681460e9142f1249408e5aee6d178c6d89b49e06d44913c8fdfb6defda8d1c"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ee68bc8735ae5058695b76d3bb2aee1d137c052a11c8303f1e966aa23b72b65b"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8273fe7932db65d952a43e238318966eab9e49e8dd546550a41df12175cc2be4"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39d57ea50aa5a524bb239688adc1d1d824c31b6094ebd39aa164d6cadb85de22"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ac6272f87693e78209dc72e84cf9ab58052027733cd0721c55356d3c881791cf"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:44c497a71f80cd2bcfc26faae8857cf8e79388e3d5fbf79d2354b8c360547d58"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:6335a4c9e8af329cb1ded5ab41a666e1448116161905a94e054f205aa6d263bc"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2e4b66e017253cdbe5a1de49e0eecff3f66cd72bcb1229d7db6e6b1832c0443"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:43e2d648e150777e13bbc2549cc960373e37bf65bd8a5d2e0cef40e16e5d8dd0"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:4ce6326fd86d5bae37813c7bf1543bae9e4c215ec6f5afe4c518be2635e2e005"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7c8fac333bf860208fd522a5394369ee3c790d0aa4311f515fcc4b6c5ef8d75"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4da5ab3aa0cc293dc0662a0c6d1da1a011dc1edcbc3122a288cfed13a0b45"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58ea11e3bf912bd0a36b0519eae2c5b560b3cb972ea756e66b73fb9be460af01"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8a375c7dbee4734318714a799b6c697223e4bbb57232af37fbfff88fb48a14c6"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:ac677b1664c4585c2e014739f6678137c8336815406052349c85898206ec7061"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66ce47ee1ba91e146149cf40565c3d750ea1be94faf660ca733d8601e0848147"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8b7e069910a66b3bba61df35b5f879e5253ff0821a70375b9daf06444d046fa4"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:a3b5d1d04a9e0f7b15aa1e647c750631a873b298cdd1255687bb68779fe8eb35"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1b95986430e789c076610aea704db20874f0781b8624f648ca9fb6ef67c6e1"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffe5f793db59e2f1d45ec35a1cf51404fdd69df9f6952a0c87c3060af4c00e32"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:620e8e73bd2595dfff1b4d256a12b67fce90ece3de87b38e1dde46b910f46f4d"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4cf9c0a985d68476c1cabd6385c691201dda2337d7453fb4da9679ce9f23f4e7"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:d144868a73002e6e2e6fef72333e7b0129cecdd121aa8f1edba7107fd067255d"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e81b26497a17c29595bc7df20bc6a872ceea5f8c9d6537283945d4b6396aec10"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:5ba00ead8d9d82fd6123eb4891c566d30a293454e54e32ff7ead7644f5f7e575"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:24d574cb4072e1640b00864e94c4c89858033936ece3fc0e1c6f7179f120d0a8"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9735695a66aad654500b0193525e590c693ab3368478ce07b34b443a1ea5e824"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63d39709e17eb72685d052ac82acf0763e047f57c86af1b791505b1fec96915d"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1ea2cc84224937d6b9b4c07f5cb7c667f2bde28c255645ba27f8a675a7af8234"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4024e6d35a9fdc1b3fd6ac7a673bd16cb176c7e0b952af6428b7129a70f72cce"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5e2dcc94dc9c9ab8704409d86fcb5236316e9dcef6feed8162287634e3568f4c"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa2de871801d824c255fac7e5e7e50f2be6c9c376fd9268b40c54b5e9da91f42"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:1314d4923c1509aa9696712a7bc0c7160d3b7acf72adafbbe6c558d523f5d491"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b750ecba3a8815ad8bc48311552f3f8ab99dd2326d29df7ff670d9c49321f48f"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d5117dce805d8a2f78baeddc8c6127281fa0a5e2c40c6dd992ba6b2b367876"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb35f607bd1cbe37b896379d5f5ed4dc298b536f4b959cb63180e05cacc0539d"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:61c74710900602637d2d145dda9720c94e303380803bf68811b2a151deec75c2"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ece59af54ebf67ecbfbbf3ca9066f5687879e36527ad69d8b6e3ac565d565a62"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:d0a7c5f875068efe106f62233bc712d50db4d07c13c7db570175c7857a7b5dbd"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90e202e72a3d5aae673fcc8c9a4267d56b2f532beeb9173361293625fe4d2039"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2df98ef1b76f5a58bb493dda552259ba60c3a37557d848e039524203951c9f06"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7100adcda5e818b5a894ad0626e38118bb896a347f40ebed8981155675b9ba7b"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e410383f5d6a449a529d074e88af8bc80020bb42b402265f9c02c8358c11da5"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2797593760da6bcc32c4a13fa825af183cd4bfd333c60b3dbf84711afca26ef"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35a144fd681a0bea1013ccc4cd3fd4dc758ea17ee23dca019c02b82ec46fc0c4"},
|
||||
{file = "backports_datetime_fromisoformat-2.0.3.tar.gz", hash = "sha256:b58edc8f517b66b397abc250ecc737969486703a66eb97e01e6d51291b1a139d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "beautifulsoup4"
|
||||
version = "4.13.3"
|
||||
@@ -756,17 +812,6 @@ files = [
{file = "installer-0.7.0.tar.gz", hash = "sha256:a26d3e3116289bb08216e0d0f7d925fcef0b0194eedfa0c944bcaaa106c4b631"},
]

[[package]]
name = "iso8601"
version = "2.1.0"
description = "Simple module to parse ISO 8601 dates"
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "iso8601-2.1.0-py3-none-any.whl", hash = "sha256:aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242"},
{file = "iso8601-2.1.0.tar.gz", hash = "sha256:6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df"},
]

[[package]]
name = "isodate"
version = "0.6.1"
@@ -1059,17 +1104,19 @@ source = ["Cython (>=3.0.11,<3.1.0)"]

[[package]]
name = "m3u8"
version = "0.9.0"
version = "6.0.0"
description = "Python m3u8 parser"
optional = false
python-versions = ">=3.5"
files = [
{file = "m3u8-0.9.0-py3-none-any.whl", hash = "sha256:7dde0a20cf985422593810006dd371a1e3e7afd33a76277111eba3f220288902"},
{file = "m3u8-0.9.0.tar.gz", hash = "sha256:3ee058855c430dc364db6b8026269d2b4c1894b198bcc5c824039c551c05f497"},
]
python-versions = ">=3.9"
files = []
develop = true

[package.dependencies]
iso8601 = "*"
backports-datetime-fromisoformat = {version = "*", markers = "python_version < \"3.11\""}

[package.source]
type = "directory"
url = "scripts/m3u8"

[[package]]
name = "macholib"
@@ -2547,4 +2594,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
content-hash = "a6d1d8597c66d0b914da73da39508244167ff2ea215b36569f40e0d0c909b74a"
content-hash = "5cb44b9ebe6b4d0faf53bf5e8f527e501801c6d34c8820a206faa64b66867038"
@@ -24,7 +24,7 @@ isodate = "^0.6.1"
jsonpickle = "^2.0.0"
langcodes = { extras = ["data"], version = "^3.1.0" }
lxml = "^5.3.0"
m3u8 = "^0.9.0"
m3u8 = { path = "./scripts/m3u8", develop = true }
marisa-trie = "^1.1.0"
poetry = "1.8.5"
pproxy = "^2.7.7"
@@ -2,9 +2,18 @@ import uuid
import base64
import xmltodict

psshPR = """FAMAAAEAAQAKAzwAVwBSAE0ASABFAEEARABFAFIAIAB4AG0AbABuAHMAPQAiAGgAdAB0AHAAOgAvAC8AcwBjAGgAZQBtAGEAcwAuAG0AaQBjAHIAbwBzAG8AZgB0AC4AYwBvAG0ALwBEAFIATQAvADIAMAAwADcALwAwADMALwBQAGwAYQB5AFIAZQBhAGQAeQBIAGUAYQBkAGUAcgAiACAAdgBlAHIAcwBpAG8AbgA9ACIANAAuADAALgAwAC4AMAAiAD4APABEAEEAVABBAD4APABQAFIATwBUAEUAQwBUAEkATgBGAE8APgA8AEsARQBZAEwARQBOAD4AMQA2ADwALwBLAEUAWQBMAEUATgA+ADwAQQBMAEcASQBEAD4AQQBFAFMAQwBUAFIAPAAvAEEATABHAEkARAA+ADwALwBQAFIATwBUAEUAQwBUAEkATgBGAE8APgA8AEMAVQBTAFQATwBNAEEAVABUAFIASQBCAFUAVABFAFMAPgA8AEsASQBEAFMAPgA8AEsASQBEACAAQQBMAEcASQBEAD0AIgBBAEUAUwBDAFQAUgAiACAAVgBBAEwAVQBFAD0AIgA5ADkAVgBpAFIAdwA3AEsAcgBrAHkAVgBSADEAVQBnAFYAUQBCAG4AVQBRAD0APQAiAD4APAAvAEsASQBEAD4APABLAEkARAAgAEEATABHAEkARAA9ACIAQQBFAFMAQwBUAFIAIgAgAFYAQQBMAFUARQA9ACIAVQBOAEoAVQBHAGgAdwB3AHIAVQBPAEsAZgBYACsAUQBvAG4AVwBGAFcAZwA9AD0AIgA+ADwALwBLAEkARAA+ADwALwBLAEkARABTAD4APAAvAEMAVQBTAFQATwBNAEEAVABUAFIASQBCAFUAVABFAFMAPgA8AEsASQBEAD4AOQA5AFYAaQBSAHcANwBLAHIAawB5AFYAUgAxAFUAZwBWAFEAQgBuAFUAUQA9AD0APAAvAEsASQBEAD4APAAvAEQAQQBUAEEAPgA8AC8AVwBSAE0ASABFAEEARABFAFIAPgA="""
psshPR = """
xAEAAAEAAQC6ATwAVwBSAE0ASABFAEEARABFAFIAIAB4AG0AbABuAHMAPQAiAGgAdAB0AHAAOgAvAC8AcwBjAGgAZQBtAGEAcwAuAG0AaQBjAHIAbwBzAG8AZgB0AC4AYwBvAG0ALwBEAFIATQAvADIAMAAwADcALwAwADMALwBQAGwAYQB5AFIAZQBhAGQAeQBIAGUAYQBkAGUAcgAiACAAdgBlAHIAcwBpAG8AbgA9ACIANAAuADAALgAwAC4AMAAiAD4APABEAEEAVABBAD4APABQAFIATwBUAEUAQwBUAEkATgBGAE8APgA8AEsARQBZAEwARQBOAD4AMQA2ADwALwBLAEUAWQBMAEUATgA+ADwAQQBMAEcASQBEAD4AQQBFAFMAQwBUAFIAPAAvAEEATABHAEkARAA+ADwALwBQAFIATwBUAEUAQwBUAEkATgBGAE8APgA8AEsASQBEAD4ATAA0AGkAWQBTAHIAaQB2AGEARQAyAFQASwBHAFAAZQBlADkAYgB1AGcAZwA9AD0APAAvAEsASQBEAD4APAAvAEQAQQBUAEEAPgA8AC8AVwBSAE0ASABFAEEARABFAFIAPgA=
"""

xml_str = base64.b64decode(psshPR).decode("utf-16-le", "ignore")
xml_str = xml_str[xml_str.index("<"):]
kids = [uuid.UUID(base64.b64decode(kid_xml['@VALUE']).hex()).bytes_le.hex() for kid_xml in xmltodict.parse(xml_str)['WRMHEADER']['DATA']['CUSTOMATTRIBUTES']['KIDS']['KID']]
kids = []
try:
    kids = [uuid.UUID(base64.b64decode(kid_xml['@VALUE']).hex()).bytes_le.hex().upper() for kid_xml in xmltodict.parse(xml_str)['WRMHEADER']['DATA']['CUSTOMATTRIBUTES']['KIDS']['KID']]
except:
    another_kid = uuid.UUID(base64.b64decode(xmltodict.parse(xml_str)['WRMHEADER']['DATA']["KID"]).hex()).bytes_le.hex().upper()
    if another_kid not in kids:
        kids.append(another_kid.upper())

print(kids)
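The KID handling above leans on how PlayReady stores key IDs: each KID in the WRMHEADER is the base64 of a GUID in little-endian (mixed-endian) byte order, while CENC tooling generally expects the big-endian hex form, hence the round trip through `uuid.UUID(...).bytes_le`. A minimal sketch of that conversion for a single KID, reusing the first KID value embedded in the old header above:

```python
import base64
import uuid

# First KID VALUE from the old psshPR WRMHEADER above.
kid_b64 = "99ViRw7KrkyVR1UgVQBnUQ=="

raw = base64.b64decode(kid_b64)                # 16 bytes in little-endian GUID layout
kid_hex = uuid.UUID(raw.hex()).bytes_le.hex()  # same expression the script uses
# Equivalent, arguably clearer spelling of the byte swap:
assert kid_hex == uuid.UUID(bytes_le=raw).bytes.hex()
print(kid_hex.upper())
```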
14
scripts/m3u8/.editorconfig
Normal file
@@ -0,0 +1,14 @@
# https://editorconfig.org
root = true

[*.py]
charset = utf-8
indent_style = space
indent_size = 4
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[Makefile]
indent_style = tab
indent_size = 4
40
scripts/m3u8/.github/workflows/main.yml
vendored
Normal file
@@ -0,0 +1,40 @@
# This is a basic workflow to help you get started with Actions

name: CI

# Controls when the action will run.
on:
  # Triggers the workflow on push or pull request events but only for the master branch
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    strategy:
      # You can use PyPy versions in python-version.
      # For example, pypy2 and pypy3
      matrix:
        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      # Runs a single command using the runners shell
      - name: Run all tests
        run: ./runtests
19
scripts/m3u8/.github/workflows/ruff.yml
vendored
Normal file
@@ -0,0 +1,19 @@
name: Ruff
run-name: Ruff

on: [ push, pull_request ]

jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/ruff-action@v1

  ruff_format:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/ruff-action@v1
        with:
          args: format --check --diff
17
scripts/m3u8/.gitignore
vendored
Normal file
@@ -0,0 +1,17 @@
*.pyc
*.egg-info
tests/server.stdout
dist/
build/
bin/
include/
lib/
lib64/
local/
.coverage
.cache
.python-version
.idea/
.vscode/
venv/
pyvenv.cfg
11
scripts/m3u8/LICENSE
Normal file
@@ -0,0 +1,11 @@
m3u8 is licensed under the MIT License:

The MIT License

Copyright (c) 2012 globo.com webmedia@corp.globo.com

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
3
scripts/m3u8/MANIFEST.in
Normal file
@@ -0,0 +1,3 @@
include requirements.txt
include LICENSE
include README.md
104
scripts/m3u8/README.md
Normal file
@@ -0,0 +1,104 @@
[](https://badge.fury.io/py/m3u8)

# m3u8

Python [m3u8](https://tools.ietf.org/html/rfc8216) parser.

# Documentation

## Loading a playlist

To load a playlist into an object from uri, file path or directly from
string, use the `load/loads` functions:

```python
import m3u8

playlist = m3u8.load('http://videoserver.com/playlist.m3u8') # this could also be an absolute filename
print(playlist.segments)
print(playlist.target_duration)

# if you already have the content as string, use

playlist = m3u8.loads('#EXTM3U8 ... etc ... ')
```

## Dumping a playlist

To dump a playlist from an object to the console or a file, use the
`dump/dumps` functions:

``` python
import m3u8

playlist = m3u8.load('http://videoserver.com/playlist.m3u8')
print(playlist.dumps())

# if you want to write a file from its content

playlist.dump('playlist.m3u8')
```

# Supported tags

- [\#EXT-X-TARGETDURATION](https://tools.ietf.org/html/rfc8216#section-4.3.3.1)
- [\#EXT-X-MEDIA-SEQUENCE](https://tools.ietf.org/html/rfc8216#section-4.3.3.2)
- [\#EXT-X-DISCONTINUITY-SEQUENCE](https://tools.ietf.org/html/rfc8216#section-4.3.3.3)
- [\#EXT-X-PROGRAM-DATE-TIME](https://tools.ietf.org/html/rfc8216#section-4.3.2.6)
- [\#EXT-X-MEDIA](https://tools.ietf.org/html/rfc8216#section-4.3.4.1)
- [\#EXT-X-PLAYLIST-TYPE](https://tools.ietf.org/html/rfc8216#section-4.3.3.5)
- [\#EXT-X-KEY](https://tools.ietf.org/html/rfc8216#section-4.3.2.4)
- [\#EXT-X-STREAM-INF](https://tools.ietf.org/html/rfc8216#section-4.3.4.2)
- [\#EXT-X-VERSION](https://tools.ietf.org/html/rfc8216#section-4.3.1.2)
- [\#EXT-X-ALLOW-CACHE](https://datatracker.ietf.org/doc/html/draft-pantos-http-live-streaming-07#section-3.3.6)
- [\#EXT-X-ENDLIST](https://tools.ietf.org/html/rfc8216#section-4.3.3.4)
- [\#EXTINF](https://tools.ietf.org/html/rfc8216#section-4.3.2.1)
- [\#EXT-X-I-FRAMES-ONLY](https://tools.ietf.org/html/rfc8216#section-4.3.3.6)
- [\#EXT-X-BITRATE](https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-4.4.4.8)
- [\#EXT-X-BYTERANGE](https://tools.ietf.org/html/rfc8216#section-4.3.2.2)
- [\#EXT-X-I-FRAME-STREAM-INF](https://tools.ietf.org/html/rfc8216#section-4.3.4.3)
- [\#EXT-X-IMAGES-ONLY](https://github.com/image-media-playlist/spec/blob/master/image_media_playlist_v0_4.pdf)
- [\#EXT-X-IMAGE-STREAM-INF](https://github.com/image-media-playlist/spec/blob/master/image_media_playlist_v0_4.pdf)
- [\#EXT-X-TILES](https://github.com/image-media-playlist/spec/blob/master/image_media_playlist_v0_4.pdf)
- [\#EXT-X-DISCONTINUITY](https://tools.ietf.org/html/rfc8216#section-4.3.2.3)
- \#EXT-X-CUE-OUT
- \#EXT-X-CUE-OUT-CONT
- \#EXT-X-CUE-IN
- \#EXT-X-CUE-SPAN
- \#EXT-OATCLS-SCTE35
- [\#EXT-X-INDEPENDENT-SEGMENTS](https://tools.ietf.org/html/rfc8216#section-4.3.5.1)
- [\#EXT-X-MAP](https://tools.ietf.org/html/rfc8216#section-4.3.2.5)
- [\#EXT-X-START](https://tools.ietf.org/html/rfc8216#section-4.3.5.2)
- [\#EXT-X-SERVER-CONTROL](https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-4.4.3.8)
- [\#EXT-X-PART-INF](https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-4.4.3.7)
- [\#EXT-X-PART](https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-4.4.4.9)
- [\#EXT-X-RENDITION-REPORT](https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-4.4.5.4)
- [\#EXT-X-SKIP](https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-4.4.5.2)
- [\#EXT-X-SESSION-DATA](https://tools.ietf.org/html/rfc8216#section-4.3.4.4)
- [\#EXT-X-PRELOAD-HINT](https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.5.3)
- [\#EXT-X-SESSION-KEY](https://tools.ietf.org/html/rfc8216#section-4.3.4.5)
- [\#EXT-X-DATERANGE](https://tools.ietf.org/html/rfc8216#section-4.3.2.7)
- [\#EXT-X-GAP](https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-05#section-4.4.2.7)
- [\#EXT-X-CONTENT-STEERING](https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-10#section-4.4.6.64)

# Frequently Asked Questions

- [FAQ](https://github.com/globocom/m3u8/wiki/FAQ)

# Running Tests

``` bash
$ ./runtests
```

# Contributing

All contributions are welcome, but we will merge a pull request if, and
only if, it

- Has tests
- Follows the code conventions

If you plan to implement a new feature or something that will take more
than a few minutes, please open an issue to make sure we don't work on
the same thing.
105
scripts/m3u8/m3u8/__init__.py
Normal file
@@ -0,0 +1,105 @@
# Copyright 2014 Globo.com Player authors. All rights reserved.
# Use of this source code is governed by a MIT License
# license that can be found in the LICENSE file.

import os
from urllib.parse import urljoin, urlsplit

from m3u8.httpclient import DefaultHTTPClient
from m3u8.model import (
    M3U8,
    ContentSteering,
    DateRange,
    DateRangeList,
    IFramePlaylist,
    ImagePlaylist,
    Key,
    Media,
    MediaList,
    PartialSegment,
    PartialSegmentList,
    PartInformation,
    Playlist,
    PlaylistList,
    PreloadHint,
    RenditionReport,
    RenditionReportList,
    Segment,
    SegmentList,
    ServerControl,
    Skip,
    Start,
    Tiles,
)
from m3u8.parser import ParseError, parse

__all__ = (
    "M3U8",
    "Segment",
    "SegmentList",
    "PartialSegment",
    "PartialSegmentList",
    "Key",
    "Playlist",
    "IFramePlaylist",
    "Media",
    "MediaList",
    "PlaylistList",
    "Start",
    "RenditionReport",
    "RenditionReportList",
    "ServerControl",
    "Skip",
    "PartInformation",
    "PreloadHint",
    "DateRange",
    "DateRangeList",
    "ContentSteering",
    "ImagePlaylist",
    "Tiles",
    "loads",
    "load",
    "parse",
    "ParseError",
)


def loads(content, uri=None, custom_tags_parser=None):
    """
    Given a string with a m3u8 content, returns a M3U8 object.
    Optionally parses a uri to set a correct base_uri on the M3U8 object.
    Raises ValueError if invalid content
    """

    if uri is None:
        return M3U8(content, custom_tags_parser=custom_tags_parser)
    else:
        base_uri = urljoin(uri, ".")
        return M3U8(content, base_uri=base_uri, custom_tags_parser=custom_tags_parser)


def load(
    uri,
    timeout=None,
    headers={},
    custom_tags_parser=None,
    http_client=DefaultHTTPClient(),
    verify_ssl=True,
):
    """
    Retrieves the content from a given URI and returns a M3U8 object.
    Raises ValueError if invalid content or IOError if request fails.
    """
    base_uri_parts = urlsplit(uri)
    if base_uri_parts.scheme and base_uri_parts.netloc:
        content, base_uri = http_client.download(uri, timeout, headers, verify_ssl)
        return M3U8(content, base_uri=base_uri, custom_tags_parser=custom_tags_parser)
    else:
        return _load_from_file(uri, custom_tags_parser)


def _load_from_file(uri, custom_tags_parser=None):
    with open(uri, encoding="utf8") as fileobj:
        raw_content = fileobj.read().strip()
    base_uri = os.path.dirname(uri)
    return M3U8(raw_content, base_uri=base_uri, custom_tags_parser=custom_tags_parser)
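The `custom_tags_parser` argument threaded through `loads()`/`load()` above is the package's main extension point: `parse()` calls it for every `#`-prefixed line and skips its own handling when the callback returns `True`. A minimal sketch, with `#EXT-X-MY-TAG` and the sample manifest made up for illustration:

```python
import m3u8
from m3u8.parser import save_segment_custom_value

def my_tags_parser(line, lineno, data, state):
    # Stash the value of a non-standard tag on the upcoming segment.
    if line.startswith("#EXT-X-MY-TAG:"):
        save_segment_custom_value(state, "my_tag", line.split(":", 1)[1])
        return True  # tell parse() to skip its built-in handling of this line

manifest = "#EXTM3U\n#EXT-X-MY-TAG:42\n#EXTINF:6.0,\nchunk_000.ts\n#EXT-X-ENDLIST\n"
playlist = m3u8.loads(manifest, custom_tags_parser=my_tags_parser)
```

The stored value travels with the segment's parsed data under `custom_parser_values` (see the helpers at the bottom of parser.py).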
36
scripts/m3u8/m3u8/httpclient.py
Normal file
@@ -0,0 +1,36 @@
import gzip
import ssl
import urllib.request
from urllib.parse import urljoin


class DefaultHTTPClient:
    def __init__(self, proxies=None):
        self.proxies = proxies

    def download(self, uri, timeout=None, headers={}, verify_ssl=True):
        proxy_handler = urllib.request.ProxyHandler(self.proxies)
        https_handler = HTTPSHandler(verify_ssl=verify_ssl)
        opener = urllib.request.build_opener(proxy_handler, https_handler)
        opener.addheaders = headers.items()
        resource = opener.open(uri, timeout=timeout)
        base_uri = urljoin(resource.geturl(), ".")

        if resource.info().get("Content-Encoding") == "gzip":
            content = gzip.decompress(resource.read()).decode(
                resource.headers.get_content_charset(failobj="utf-8")
            )
        else:
            content = resource.read().decode(
                resource.headers.get_content_charset(failobj="utf-8")
            )
        return content, base_uri


class HTTPSHandler:
    def __new__(self, verify_ssl=True):
        context = ssl.create_default_context()
        if not verify_ssl:
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
        return urllib.request.HTTPSHandler(context=context)
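`DefaultHTTPClient` is what `load()` uses by default; passing your own instance is how proxies, extra headers, and TLS verification are controlled. A small sketch, with the proxy address and playlist URL as placeholders:

```python
import m3u8
from m3u8.httpclient import DefaultHTTPClient

# The proxies mapping uses the urllib.request.ProxyHandler format.
client = DefaultHTTPClient(proxies={"https": "http://127.0.0.1:8080"})

playlist = m3u8.load(
    "https://example.com/master.m3u8",   # placeholder URI
    headers={"User-Agent": "Mozilla/5.0"},
    http_client=client,
    verify_ssl=False,                    # routed into HTTPSHandler above
)
print(len(playlist.playlists))
```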
52
scripts/m3u8/m3u8/mixins.py
Normal file
@@ -0,0 +1,52 @@
from os.path import dirname
from urllib.parse import urljoin, urlsplit


class BasePathMixin:
    @property
    def absolute_uri(self):
        if self.uri is None:
            return None

        ret = urljoin(self.base_uri, self.uri)
        if self.base_uri:
            base_uri_parts = urlsplit(self.base_uri)
            if (not base_uri_parts.scheme) and (not base_uri_parts.netloc):
                return ret

        if not urlsplit(ret).scheme:
            raise ValueError("There can not be `absolute_uri` with no `base_uri` set")

        return ret

    @property
    def base_path(self):
        if self.uri is None:
            return None
        return dirname(self.get_path_from_uri())

    def get_path_from_uri(self):
        """Some URIs have a slash in the query string."""
        return self.uri.split("?")[0]

    @base_path.setter
    def base_path(self, newbase_path):
        if self.uri is not None:
            if not self.base_path:
                self.uri = f"{newbase_path}/{self.uri}"
            else:
                self.uri = self.uri.replace(self.base_path, newbase_path)


class GroupedBasePathMixin:
    def _set_base_uri(self, new_base_uri):
        for item in self:
            item.base_uri = new_base_uri

    base_uri = property(None, _set_base_uri)

    def _set_base_path(self, newbase_path):
        for item in self:
            item.base_path = newbase_path

    base_path = property(None, _set_base_path)
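A small sketch of what `BasePathMixin` provides once model.py (collapsed below) mixes it into segments and playlists, as in the upstream library; the URIs here are placeholders:

```python
import m3u8

playlist = m3u8.loads(
    "#EXTM3U\n#EXTINF:6.0,\nchunk_000.ts\n#EXT-X-ENDLIST\n",
    uri="https://cdn.example.com/stream/index.m3u8",
)
segment = playlist.segments[0]
print(segment.absolute_uri)   # -> https://cdn.example.com/stream/chunk_000.ts
segment.base_path = "media"   # the setter above rewrites the relative uri
print(segment.uri)            # -> media/chunk_000.ts
```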
1673
scripts/m3u8/m3u8/model.py
Normal file
File diff suppressed because it is too large
795
scripts/m3u8/m3u8/parser.py
Normal file
@@ -0,0 +1,795 @@
|
||||
# Copyright 2014 Globo.com Player authors. All rights reserved.
|
||||
# Use of this source code is governed by a MIT License
|
||||
# license that can be found in the LICENSE file.
|
||||
|
||||
import itertools
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
try:
|
||||
from backports.datetime_fromisoformat import MonkeyPatch
|
||||
|
||||
MonkeyPatch.patch_fromisoformat()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
from m3u8 import protocol, version_matching
|
||||
|
||||
"""
|
||||
http://tools.ietf.org/html/draft-pantos-http-live-streaming-08#section-3.2
|
||||
http://stackoverflow.com/questions/2785755/how-to-split-but-ignore-separators-in-quoted-strings-in-python
|
||||
"""
|
||||
ATTRIBUTELISTPATTERN = re.compile(r"""((?:[^,"']|"[^"]*"|'[^']*')+)""")
|
||||
|
||||
|
||||
def cast_date_time(value):
|
||||
return datetime.fromisoformat(value)
|
||||
|
||||
|
||||
def format_date_time(value, **kwargs):
|
||||
return value.isoformat(**kwargs)
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
def __init__(self, lineno, line):
|
||||
self.lineno = lineno
|
||||
self.line = line
|
||||
|
||||
def __str__(self):
|
||||
return "Syntax error in manifest on line %d: %s" % (self.lineno, self.line)
|
||||
|
||||
|
||||
def parse(content, strict=False, custom_tags_parser=None):
|
||||
"""
|
||||
Given a M3U8 playlist content returns a dictionary with all data found
|
||||
"""
|
||||
data = {
|
||||
"media_sequence": 0,
|
||||
"is_variant": False,
|
||||
"is_endlist": False,
|
||||
"is_i_frames_only": False,
|
||||
"is_independent_segments": False,
|
||||
"is_images_only": False,
|
||||
"playlist_type": None,
|
||||
"playlists": [],
|
||||
"segments": [],
|
||||
"iframe_playlists": [],
|
||||
"image_playlists": [],
|
||||
"tiles": [],
|
||||
"media": [],
|
||||
"keys": [],
|
||||
"rendition_reports": [],
|
||||
"skip": {},
|
||||
"part_inf": {},
|
||||
"session_data": [],
|
||||
"session_keys": [],
|
||||
"segment_map": [],
|
||||
}
|
||||
|
||||
state = {
|
||||
"expect_segment": False,
|
||||
"expect_playlist": False,
|
||||
"current_key": None,
|
||||
"current_segment_map": None,
|
||||
}
|
||||
|
||||
lines = string_to_lines(content)
|
||||
if strict:
|
||||
found_errors = version_matching.validate(lines)
|
||||
|
||||
if len(found_errors) > 0:
|
||||
raise Exception(found_errors)
|
||||
|
||||
for lineno, line in enumerate(lines, 1):
|
||||
line = line.strip()
|
||||
parse_kwargs = {
|
||||
"line": line,
|
||||
"lineno": lineno,
|
||||
"data": data,
|
||||
"state": state,
|
||||
"strict": strict,
|
||||
}
|
||||
|
||||
# Call custom parser if needed
|
||||
if line.startswith("#") and callable(custom_tags_parser):
|
||||
go_to_next_line = custom_tags_parser(line, lineno, data, state)
|
||||
|
||||
# Do not try to parse other standard tags on this line if custom_tags_parser
|
||||
# function returns `True`
|
||||
if go_to_next_line:
|
||||
continue
|
||||
|
||||
if line.startswith(protocol.ext_x_byterange):
|
||||
_parse_byterange(**parse_kwargs)
|
||||
continue
|
||||
|
||||
elif line.startswith(protocol.ext_x_bitrate):
|
||||
_parse_bitrate(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_targetduration):
|
||||
_parse_targetduration(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_media_sequence):
|
||||
_parse_media_sequence(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_discontinuity_sequence):
|
||||
_parse_discontinuity_sequence(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_program_date_time):
|
||||
_parse_program_date_time(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_discontinuity):
|
||||
_parse_discontinuity(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_cue_out_cont):
|
||||
_parse_cueout_cont(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_cue_out):
|
||||
_parse_cueout(**parse_kwargs)
|
||||
|
||||
elif line.startswith(f"{protocol.ext_oatcls_scte35}:"):
|
||||
_parse_oatcls_scte35(**parse_kwargs)
|
||||
|
||||
elif line.startswith(f"{protocol.ext_x_asset}:"):
|
||||
_parse_asset(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_cue_in):
|
||||
_parse_cue_in(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_cue_span):
|
||||
_parse_cue_span(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_version):
|
||||
_parse_version(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_allow_cache):
|
||||
_parse_allow_cache(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_key):
|
||||
_parse_key(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.extinf):
|
||||
_parse_extinf(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_stream_inf):
|
||||
_parse_stream_inf(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_i_frame_stream_inf):
|
||||
_parse_i_frame_stream_inf(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_media):
|
||||
_parse_media(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_playlist_type):
|
||||
_parse_playlist_type(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_i_frames_only):
|
||||
_parse_i_frames_only(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_is_independent_segments):
|
||||
_parse_is_independent_segments(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_endlist):
|
||||
_parse_endlist(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_map):
|
||||
_parse_x_map(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_start):
|
||||
_parse_start(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_server_control):
|
||||
_parse_server_control(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_part_inf):
|
||||
_parse_part_inf(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_rendition_report):
|
||||
_parse_rendition_report(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_part):
|
||||
_parse_part(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_skip):
|
||||
_parse_skip(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_session_data):
|
||||
_parse_session_data(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_session_key):
|
||||
_parse_session_key(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_preload_hint):
|
||||
_parse_preload_hint(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_daterange):
|
||||
_parse_daterange(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_gap):
|
||||
_parse_gap(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_content_steering):
|
||||
_parse_content_steering(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_image_stream_inf):
|
||||
_parse_image_stream_inf(**parse_kwargs)
|
||||
|
||||
elif line.startswith(protocol.ext_x_images_only):
|
||||
_parse_is_images_only(**parse_kwargs)
|
||||
elif line.startswith(protocol.ext_x_tiles):
|
||||
_parse_tiles(**parse_kwargs)
|
||||
|
||||
# #EXTM3U should be present.
|
||||
elif line.startswith(protocol.ext_m3u):
|
||||
pass
|
||||
|
||||
# Blank lines are ignored.
|
||||
elif line.strip() == "":
|
||||
pass
|
||||
|
||||
# Lines that don't start with # are either segments or playlists.
|
||||
elif (not line.startswith("#")) and (state["expect_segment"]):
|
||||
_parse_ts_chunk(**parse_kwargs)
|
||||
|
||||
elif (not line.startswith("#")) and (state["expect_playlist"]):
|
||||
_parse_variant_playlist(**parse_kwargs)
|
||||
|
||||
# Lines that haven't been recognized by any of the parsers above are illegal
|
||||
# in strict mode.
|
||||
elif strict:
|
||||
raise ParseError(lineno, line)
|
||||
|
||||
# Handle remaining partial segments.
|
||||
if "segment" in state:
|
||||
data["segments"].append(state.pop("segment"))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def _parse_key(line, data, state, **kwargs):
|
||||
params = ATTRIBUTELISTPATTERN.split(line.replace(protocol.ext_x_key + ":", ""))[
|
||||
1::2
|
||||
]
|
||||
key = {}
|
||||
for param in params:
|
||||
name, value = param.split("=", 1)
|
||||
key[normalize_attribute(name)] = remove_quotes(value)
|
||||
|
||||
state["current_key"] = key
|
||||
if key not in data["keys"]:
|
||||
data["keys"].append(key)
|
||||
|
||||
|
||||
def _parse_extinf(line, state, lineno, strict, **kwargs):
|
||||
chunks = line.replace(protocol.extinf + ":", "").split(",", 1)
|
||||
if len(chunks) == 2:
|
||||
duration, title = chunks
|
||||
elif len(chunks) == 1:
|
||||
if strict:
|
||||
raise ParseError(lineno, line)
|
||||
else:
|
||||
duration = chunks[0]
|
||||
title = ""
|
||||
if "segment" not in state:
|
||||
state["segment"] = {}
|
||||
state["segment"]["duration"] = float(duration)
|
||||
state["segment"]["title"] = title
|
||||
state["expect_segment"] = True
|
||||
|
||||
|
||||
def _parse_ts_chunk(line, data, state, **kwargs):
|
||||
segment = state.pop("segment")
|
||||
if state.get("program_date_time"):
|
||||
segment["program_date_time"] = state.pop("program_date_time")
|
||||
if state.get("current_program_date_time"):
|
||||
segment["current_program_date_time"] = state["current_program_date_time"]
|
||||
state["current_program_date_time"] += timedelta(seconds=segment["duration"])
|
||||
segment["uri"] = line
|
||||
segment["cue_in"] = state.pop("cue_in", False)
|
||||
segment["cue_out"] = state.pop("cue_out", False)
|
||||
segment["cue_out_start"] = state.pop("cue_out_start", False)
|
||||
segment["cue_out_explicitly_duration"] = state.pop(
|
||||
"cue_out_explicitly_duration", False
|
||||
)
|
||||
|
||||
scte_op = state.get if segment["cue_out"] else state.pop
|
||||
segment["scte35"] = scte_op("current_cue_out_scte35", None)
|
||||
segment["oatcls_scte35"] = scte_op("current_cue_out_oatcls_scte35", None)
|
||||
segment["scte35_duration"] = scte_op("current_cue_out_duration", None)
|
||||
segment["scte35_elapsedtime"] = scte_op("current_cue_out_elapsedtime", None)
|
||||
segment["asset_metadata"] = scte_op("asset_metadata", None)
|
||||
|
||||
segment["discontinuity"] = state.pop("discontinuity", False)
|
||||
if state.get("current_key"):
|
||||
segment["key"] = state["current_key"]
|
||||
else:
|
||||
# For unencrypted segments, the initial key would be None
|
||||
if None not in data["keys"]:
|
||||
data["keys"].append(None)
|
||||
if state.get("current_segment_map"):
|
||||
segment["init_section"] = state["current_segment_map"]
|
||||
segment["dateranges"] = state.pop("dateranges", None)
|
||||
segment["gap_tag"] = state.pop("gap", None)
|
||||
data["segments"].append(segment)
|
||||
state["expect_segment"] = False
|
||||
|
||||
|
||||
def _parse_attribute_list(prefix, line, attribute_parser, default_parser=None):
|
||||
params = ATTRIBUTELISTPATTERN.split(line.replace(prefix + ":", ""))[1::2]
|
||||
|
||||
attributes = {}
|
||||
if not line.startswith(prefix + ":"):
|
||||
return attributes
|
||||
|
||||
for param in params:
|
||||
param_parts = param.split("=", 1)
|
||||
if len(param_parts) == 1:
|
||||
name = ""
|
||||
value = param_parts[0]
|
||||
else:
|
||||
name, value = param_parts
|
||||
|
||||
name = normalize_attribute(name)
|
||||
if name in attribute_parser:
|
||||
value = attribute_parser[name](value)
|
||||
elif default_parser is not None:
|
||||
value = default_parser(value)
|
||||
|
||||
attributes[name] = value
|
||||
|
||||
return attributes
|
||||
|
||||
|
||||
def _parse_stream_inf(line, data, state, **kwargs):
|
||||
state["expect_playlist"] = True
|
||||
data["is_variant"] = True
|
||||
data["media_sequence"] = None
|
||||
attribute_parser = remove_quotes_parser(
|
||||
"codecs",
|
||||
"audio",
|
||||
"video",
|
||||
"video_range",
|
||||
"subtitles",
|
||||
"pathway_id",
|
||||
"stable_variant_id",
|
||||
)
|
||||
attribute_parser["program_id"] = int
|
||||
attribute_parser["bandwidth"] = lambda x: int(float(x))
|
||||
attribute_parser["average_bandwidth"] = int
|
||||
attribute_parser["frame_rate"] = float
|
||||
attribute_parser["hdcp_level"] = str
|
||||
state["stream_info"] = _parse_attribute_list(
|
||||
protocol.ext_x_stream_inf, line, attribute_parser
|
||||
)
|
||||
|
||||
|
||||
def _parse_i_frame_stream_inf(line, data, **kwargs):
|
||||
attribute_parser = remove_quotes_parser(
|
||||
"codecs", "uri", "pathway_id", "stable_variant_id"
|
||||
)
|
||||
attribute_parser["program_id"] = int
|
||||
attribute_parser["bandwidth"] = int
|
||||
attribute_parser["average_bandwidth"] = int
|
||||
attribute_parser["hdcp_level"] = str
|
||||
iframe_stream_info = _parse_attribute_list(
|
||||
protocol.ext_x_i_frame_stream_inf, line, attribute_parser
|
||||
)
|
||||
iframe_playlist = {
|
||||
"uri": iframe_stream_info.pop("uri"),
|
||||
"iframe_stream_info": iframe_stream_info,
|
||||
}
|
||||
|
||||
data["iframe_playlists"].append(iframe_playlist)
|
||||
|
||||
|
||||
def _parse_image_stream_inf(line, data, **kwargs):
|
||||
attribute_parser = remove_quotes_parser(
|
||||
"codecs", "uri", "pathway_id", "stable_variant_id"
|
||||
)
|
||||
attribute_parser["program_id"] = int
|
||||
attribute_parser["bandwidth"] = int
|
||||
attribute_parser["average_bandwidth"] = int
|
||||
attribute_parser["resolution"] = str
|
||||
image_stream_info = _parse_attribute_list(
|
||||
protocol.ext_x_image_stream_inf, line, attribute_parser
|
||||
)
|
||||
image_playlist = {
|
||||
"uri": image_stream_info.pop("uri"),
|
||||
"image_stream_info": image_stream_info,
|
||||
}
|
||||
|
||||
data["image_playlists"].append(image_playlist)
|
||||
|
||||
|
||||
def _parse_is_images_only(line, data, **kwargs):
|
||||
data["is_images_only"] = True
|
||||
|
||||
|
||||
def _parse_tiles(line, data, state, **kwargs):
|
||||
attribute_parser = remove_quotes_parser("uri")
|
||||
attribute_parser["resolution"] = str
|
||||
attribute_parser["layout"] = str
|
||||
attribute_parser["duration"] = float
|
||||
tiles_info = _parse_attribute_list(protocol.ext_x_tiles, line, attribute_parser)
|
||||
data["tiles"].append(tiles_info)
|
||||
|
||||
|
||||
def _parse_media(line, data, **kwargs):
|
||||
quoted = remove_quotes_parser(
|
||||
"uri",
|
||||
"group_id",
|
||||
"language",
|
||||
"assoc_language",
|
||||
"name",
|
||||
"instream_id",
|
||||
"characteristics",
|
||||
"channels",
|
||||
"stable_rendition_id",
|
||||
"thumbnails",
|
||||
"image",
|
||||
)
|
||||
media = _parse_attribute_list(protocol.ext_x_media, line, quoted)
|
||||
data["media"].append(media)
|
||||
|
||||
|
||||
def _parse_variant_playlist(line, data, state, **kwargs):
|
||||
playlist = {"uri": line, "stream_info": state.pop("stream_info")}
|
||||
data["playlists"].append(playlist)
|
||||
state["expect_playlist"] = False
|
||||
|
||||
|
||||
def _parse_bitrate(state, **kwargs):
|
||||
if "segment" not in state:
|
||||
state["segment"] = {}
|
||||
state["segment"]["bitrate"] = _parse_simple_parameter(cast_to=int, **kwargs)
|
||||
|
||||
|
||||
def _parse_byterange(line, state, **kwargs):
|
||||
if "segment" not in state:
|
||||
state["segment"] = {}
|
||||
state["segment"]["byterange"] = line.replace(protocol.ext_x_byterange + ":", "")
|
||||
state["expect_segment"] = True
|
||||
|
||||
|
||||
def _parse_targetduration(**parse_kwargs):
|
||||
return _parse_simple_parameter(cast_to=int, **parse_kwargs)
|
||||
|
||||
|
||||
def _parse_media_sequence(**parse_kwargs):
|
||||
return _parse_simple_parameter(cast_to=int, **parse_kwargs)
|
||||
|
||||
|
||||
def _parse_discontinuity_sequence(**parse_kwargs):
|
||||
return _parse_simple_parameter(cast_to=int, **parse_kwargs)
|
||||
|
||||
|
||||
def _parse_program_date_time(line, state, data, **parse_kwargs):
|
||||
_, program_date_time = _parse_simple_parameter_raw_value(
|
||||
line, cast_to=cast_date_time, **parse_kwargs
|
||||
)
|
||||
if not data.get("program_date_time"):
|
||||
data["program_date_time"] = program_date_time
|
||||
state["current_program_date_time"] = program_date_time
|
||||
state["program_date_time"] = program_date_time
|
||||
|
||||
|
||||
def _parse_discontinuity(state, **parse_kwargs):
|
||||
state["discontinuity"] = True
|
||||
|
||||
|
||||
def _parse_cue_in(state, **parse_kwargs):
|
||||
state["cue_in"] = True
|
||||
|
||||
|
||||
def _parse_cue_span(state, **parse_kwargs):
|
||||
state["cue_out"] = True
|
||||
|
||||
|
||||
def _parse_version(**parse_kwargs):
|
||||
return _parse_simple_parameter(cast_to=int, **parse_kwargs)
|
||||
|
||||
|
||||
def _parse_allow_cache(**parse_kwargs):
|
||||
return _parse_simple_parameter(cast_to=str, **parse_kwargs)
|
||||
|
||||
|
||||
def _parse_playlist_type(line, data, **kwargs):
|
||||
return _parse_simple_parameter(line, data)
|
||||
|
||||
|
||||
def _parse_x_map(line, data, state, **kwargs):
|
||||
quoted_parser = remove_quotes_parser("uri", "byterange")
|
||||
segment_map_info = _parse_attribute_list(protocol.ext_x_map, line, quoted_parser)
|
||||
state["current_segment_map"] = segment_map_info
|
||||
data["segment_map"].append(segment_map_info)
|
||||
|
||||
|
||||
def _parse_start(line, data, **kwargs):
|
||||
attribute_parser = {"time_offset": lambda x: float(x)}
|
||||
start_info = _parse_attribute_list(protocol.ext_x_start, line, attribute_parser)
|
||||
data["start"] = start_info
|
||||
|
||||
|
||||
def _parse_gap(state, **kwargs):
|
||||
state["gap"] = True
|
||||
|
||||
|
||||
def _parse_simple_parameter_raw_value(line, cast_to=str, normalize=False, **kwargs):
|
||||
param, value = line.split(":", 1)
|
||||
param = normalize_attribute(param.replace("#EXT-X-", ""))
|
||||
if normalize:
|
||||
value = value.strip().lower()
|
||||
return param, cast_to(value)
|
||||
|
||||
|
||||
def _parse_and_set_simple_parameter_raw_value(
|
||||
line, data, cast_to=str, normalize=False, **kwargs
|
||||
):
|
||||
param, value = _parse_simple_parameter_raw_value(line, cast_to, normalize)
|
||||
data[param] = value
|
||||
return data[param]
|
||||
|
||||
|
||||
def _parse_simple_parameter(line, data, cast_to=str, **kwargs):
|
||||
return _parse_and_set_simple_parameter_raw_value(line, data, cast_to, True)
|
||||
|
||||
|
||||
def _parse_i_frames_only(data, **kwargs):
|
||||
data["is_i_frames_only"] = True
|
||||
|
||||
|
||||
def _parse_is_independent_segments(data, **kwargs):
|
||||
data["is_independent_segments"] = True
|
||||
|
||||
|
||||
def _parse_endlist(data, **kwargs):
|
||||
data["is_endlist"] = True
|
||||
|
||||
|
||||
def _parse_cueout_cont(line, state, **kwargs):
|
||||
state["cue_out"] = True
|
||||
|
||||
elements = line.split(":", 1)
|
||||
if len(elements) != 2:
|
||||
return
|
||||
|
||||
# EXT-X-CUE-OUT-CONT:ElapsedTime=10,Duration=60,SCTE35=... style
|
||||
cue_info = _parse_attribute_list(
|
||||
protocol.ext_x_cue_out_cont,
|
||||
line,
|
||||
remove_quotes_parser("duration", "elapsedtime", "scte35"),
|
||||
)
|
||||
|
||||
# EXT-X-CUE-OUT-CONT:2.436/120 style
|
||||
progress = cue_info.get("")
|
||||
if progress:
|
||||
progress_parts = progress.split("/", 1)
|
||||
if len(progress_parts) == 1:
|
||||
state["current_cue_out_duration"] = progress_parts[0]
|
||||
else:
|
||||
state["current_cue_out_elapsedtime"] = progress_parts[0]
|
||||
state["current_cue_out_duration"] = progress_parts[1]
|
||||
|
||||
duration = cue_info.get("duration")
|
||||
if duration:
|
||||
state["current_cue_out_duration"] = duration
|
||||
|
||||
scte35 = cue_info.get("scte35")
|
||||
if duration:
|
||||
state["current_cue_out_scte35"] = scte35
|
||||
|
||||
elapsedtime = cue_info.get("elapsedtime")
|
||||
if elapsedtime:
|
||||
state["current_cue_out_elapsedtime"] = elapsedtime
|
||||
|
||||
|
||||
def _parse_cueout(line, state, **kwargs):
|
||||
state["cue_out_start"] = True
|
||||
state["cue_out"] = True
|
||||
if "DURATION" in line.upper():
|
||||
state["cue_out_explicitly_duration"] = True
|
||||
|
||||
elements = line.split(":", 1)
|
||||
if len(elements) != 2:
|
||||
return
|
||||
|
||||
cue_info = _parse_attribute_list(
|
||||
protocol.ext_x_cue_out,
|
||||
line,
|
||||
remove_quotes_parser("cue"),
|
||||
)
|
||||
cue_out_scte35 = cue_info.get("cue")
|
||||
cue_out_duration = cue_info.get("duration") or cue_info.get("")
|
||||
|
||||
current_cue_out_scte35 = state.get("current_cue_out_scte35")
|
||||
state["current_cue_out_scte35"] = cue_out_scte35 or current_cue_out_scte35
|
||||
state["current_cue_out_duration"] = cue_out_duration
|
||||
|
||||
|
||||
def _parse_server_control(line, data, **kwargs):
|
||||
attribute_parser = {
|
||||
"can_block_reload": str,
|
||||
"hold_back": lambda x: float(x),
|
||||
"part_hold_back": lambda x: float(x),
|
||||
"can_skip_until": lambda x: float(x),
|
||||
"can_skip_dateranges": str,
|
||||
}
|
||||
|
||||
data["server_control"] = _parse_attribute_list(
|
||||
protocol.ext_x_server_control, line, attribute_parser
|
||||
)
|
||||
|
||||
|
||||
def _parse_part_inf(line, data, **kwargs):
|
||||
attribute_parser = {"part_target": lambda x: float(x)}
|
||||
|
||||
data["part_inf"] = _parse_attribute_list(
|
||||
protocol.ext_x_part_inf, line, attribute_parser
|
||||
)
|
||||
|
||||
|
||||
def _parse_rendition_report(line, data, **kwargs):
|
||||
attribute_parser = remove_quotes_parser("uri")
|
||||
attribute_parser["last_msn"] = int
|
||||
attribute_parser["last_part"] = int
|
||||
|
||||
rendition_report = _parse_attribute_list(
|
||||
protocol.ext_x_rendition_report, line, attribute_parser
|
||||
)
|
||||
|
||||
data["rendition_reports"].append(rendition_report)
|
||||
|
||||
|
||||
def _parse_part(line, state, **kwargs):
|
||||
attribute_parser = remove_quotes_parser("uri")
|
||||
attribute_parser["duration"] = lambda x: float(x)
|
||||
attribute_parser["independent"] = str
|
||||
attribute_parser["gap"] = str
|
||||
attribute_parser["byterange"] = str
|
||||
|
||||
part = _parse_attribute_list(protocol.ext_x_part, line, attribute_parser)
|
||||
|
||||
# this should always be true according to spec
|
||||
if state.get("current_program_date_time"):
|
||||
part["program_date_time"] = state["current_program_date_time"]
|
||||
state["current_program_date_time"] += timedelta(seconds=part["duration"])
|
||||
|
||||
part["dateranges"] = state.pop("dateranges", None)
|
||||
part["gap_tag"] = state.pop("gap", None)
|
||||
|
||||
if "segment" not in state:
|
||||
state["segment"] = {}
|
||||
segment = state["segment"]
|
||||
if "parts" not in segment:
|
||||
segment["parts"] = []
|
||||
|
||||
segment["parts"].append(part)
|
||||
|
||||
|
||||
def _parse_skip(line, data, **parse_kwargs):
|
||||
attribute_parser = remove_quotes_parser("recently_removed_dateranges")
|
||||
attribute_parser["skipped_segments"] = int
|
||||
|
||||
data["skip"] = _parse_attribute_list(protocol.ext_x_skip, line, attribute_parser)
|
||||
|
||||
|
||||
def _parse_session_data(line, data, **kwargs):
|
||||
quoted = remove_quotes_parser("data_id", "value", "uri", "language")
|
||||
session_data = _parse_attribute_list(protocol.ext_x_session_data, line, quoted)
|
||||
data["session_data"].append(session_data)
|
||||
|
||||
|
||||
def _parse_session_key(line, data, **kwargs):
|
||||
params = ATTRIBUTELISTPATTERN.split(
|
||||
        line.replace(protocol.ext_x_session_key + ":", "")
    )[1::2]

    key = {}
    for param in params:
        name, value = param.split("=", 1)
        key[normalize_attribute(name)] = remove_quotes(value)

    data["session_keys"].append(key)


def _parse_preload_hint(line, data, **kwargs):
    attribute_parser = remove_quotes_parser("uri")
    attribute_parser["type"] = str
    attribute_parser["byterange_start"] = int
    attribute_parser["byterange_length"] = int

    data["preload_hint"] = _parse_attribute_list(
        protocol.ext_x_preload_hint, line, attribute_parser
    )


def _parse_daterange(line, state, **kwargs):
    attribute_parser = remove_quotes_parser("id", "class", "start_date", "end_date")
    attribute_parser["duration"] = float
    attribute_parser["planned_duration"] = float
    attribute_parser["end_on_next"] = str
    attribute_parser["scte35_cmd"] = str
    attribute_parser["scte35_out"] = str
    attribute_parser["scte35_in"] = str

    parsed = _parse_attribute_list(protocol.ext_x_daterange, line, attribute_parser)

    if "dateranges" not in state:
        state["dateranges"] = []

    state["dateranges"].append(parsed)


def _parse_content_steering(line, data, **kwargs):
    attribute_parser = remove_quotes_parser("server_uri", "pathway_id")

    data["content_steering"] = _parse_attribute_list(
        protocol.ext_x_content_steering, line, attribute_parser
    )


def _parse_oatcls_scte35(line, state, **kwargs):
    scte35_cue = line.split(":", 1)[1]
    state["current_cue_out_oatcls_scte35"] = scte35_cue
    state["current_cue_out_scte35"] = scte35_cue


def _parse_asset(line, state, **kwargs):
    # EXT-X-ASSET attribute values may or may not be quoted, and need to be URL-encoded.
    # They are preserved as-is here to prevent loss of information.
    state["asset_metadata"] = _parse_attribute_list(
        protocol.ext_x_asset, line, {}, default_parser=str
    )


def string_to_lines(string):
    return string.strip().splitlines()


def remove_quotes_parser(*attrs):
    return dict(zip(attrs, itertools.repeat(remove_quotes)))


def remove_quotes(string):
    """
    Remove quotes from string.

    Ex.:
      "foo" -> foo
      'foo' -> foo
      'foo -> 'foo

    """
    quotes = ('"', "'")
    if string.startswith(quotes) and string.endswith(quotes):
        return string[1:-1]
    return string


def normalize_attribute(attribute):
    return attribute.replace("-", "_").lower().strip()


def get_segment_custom_value(state, key, default=None):
    """
    Helper function for getting custom values for Segment.
    Useful with custom_tags_parser.
    """
    if "segment" not in state:
        return default
    if "custom_parser_values" not in state["segment"]:
        return default
    return state["segment"]["custom_parser_values"].get(key, default)


def save_segment_custom_value(state, key, value):
    """
    Helper function for saving custom values for Segment.
    Useful with custom_tags_parser.
    """
    if "segment" not in state:
        state["segment"] = {}

    if "custom_parser_values" not in state["segment"]:
        state["segment"]["custom_parser_values"] = {}

    state["segment"]["custom_parser_values"][key] = value
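The two custom-value helpers above are meant to be called from a custom_tags_parser callback. A minimal sketch, assuming the bundled copy behaves like upstream m3u8 (callback signature (line, lineno, data, state) and segments exposing custom_parser_values); the #EXT-X-EXAMPLE-TAG tag is invented purely for illustration:

import m3u8

from m3u8.parser import save_segment_custom_value

PLAYLIST = """#EXTM3U
#EXT-X-TARGETDURATION:6
#EXTINF:5.0,
#EXT-X-EXAMPLE-TAG:hello
segment0.ts
#EXT-X-ENDLIST
"""


def example_tags_parser(line, lineno, data, state):
    # Stash the value of the (invented) vendor tag on the segment being built.
    if line.startswith("#EXT-X-EXAMPLE-TAG"):
        save_segment_custom_value(state, "example_tag", line.split(":", 1)[1])


playlist = m3u8.loads(PLAYLIST, custom_tags_parser=example_tags_parser)
print(playlist.segments[0].custom_parser_values)  # expected: {'example_tag': 'hello'}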
scripts/m3u8/m3u8/protocol.py (Normal file, 45 lines)
@@ -0,0 +1,45 @@
# Copyright 2014 Globo.com Player authors. All rights reserved.
# Use of this source code is governed by a MIT License
# license that can be found in the LICENSE file.

ext_m3u = "#EXTM3U"
ext_x_targetduration = "#EXT-X-TARGETDURATION"
ext_x_media_sequence = "#EXT-X-MEDIA-SEQUENCE"
ext_x_discontinuity_sequence = "#EXT-X-DISCONTINUITY-SEQUENCE"
ext_x_program_date_time = "#EXT-X-PROGRAM-DATE-TIME"
ext_x_media = "#EXT-X-MEDIA"
ext_x_playlist_type = "#EXT-X-PLAYLIST-TYPE"
ext_x_key = "#EXT-X-KEY"
ext_x_stream_inf = "#EXT-X-STREAM-INF"
ext_x_version = "#EXT-X-VERSION"
ext_x_allow_cache = "#EXT-X-ALLOW-CACHE"
ext_x_endlist = "#EXT-X-ENDLIST"
extinf = "#EXTINF"
ext_i_frames_only = "#EXT-X-I-FRAMES-ONLY"
ext_x_asset = "#EXT-X-ASSET"
ext_x_bitrate = "#EXT-X-BITRATE"
ext_x_byterange = "#EXT-X-BYTERANGE"
ext_x_i_frame_stream_inf = "#EXT-X-I-FRAME-STREAM-INF"
ext_x_discontinuity = "#EXT-X-DISCONTINUITY"
ext_x_cue_out = "#EXT-X-CUE-OUT"
ext_x_cue_out_cont = "#EXT-X-CUE-OUT-CONT"
ext_x_cue_in = "#EXT-X-CUE-IN"
ext_x_cue_span = "#EXT-X-CUE-SPAN"
ext_oatcls_scte35 = "#EXT-OATCLS-SCTE35"
ext_is_independent_segments = "#EXT-X-INDEPENDENT-SEGMENTS"
ext_x_map = "#EXT-X-MAP"
ext_x_start = "#EXT-X-START"
ext_x_server_control = "#EXT-X-SERVER-CONTROL"
ext_x_part_inf = "#EXT-X-PART-INF"
ext_x_part = "#EXT-X-PART"
ext_x_rendition_report = "#EXT-X-RENDITION-REPORT"
ext_x_skip = "#EXT-X-SKIP"
ext_x_session_data = "#EXT-X-SESSION-DATA"
ext_x_session_key = "#EXT-X-SESSION-KEY"
ext_x_preload_hint = "#EXT-X-PRELOAD-HINT"
ext_x_daterange = "#EXT-X-DATERANGE"
ext_x_gap = "#EXT-X-GAP"
ext_x_content_steering = "#EXT-X-CONTENT-STEERING"
ext_x_image_stream_inf = "#EXT-X-IMAGE-STREAM-INF"
ext_x_images_only = "#EXT-X-IMAGES-ONLY"
ext_x_tiles = "#EXT-X-TILES"
scripts/m3u8/m3u8/version_matching.py (Normal file, 37 lines)
@@ -0,0 +1,37 @@
from m3u8 import protocol
from m3u8.version_matching_rules import VersionMatchingError, available_rules


def get_version(file_lines: list[str]):
    for line in file_lines:
        if line.startswith(protocol.ext_x_version):
            version = line.split(":")[1]
            return float(version)

    return None


def valid_in_all_rules(
    line_number: int, line: str, version: float
) -> list[VersionMatchingError]:
    errors = []
    for rule in available_rules:
        validator = rule(version, line_number, line)

        if not validator.validate():
            errors.append(validator.get_error())

    return errors


def validate(file_lines: list[str]) -> list[VersionMatchingError]:
    found_version = get_version(file_lines)
    if found_version is None:
        return []

    errors = []
    for number, line in enumerate(file_lines):
        errors_in_line = valid_in_all_rules(number, line, found_version)
        errors.extend(errors_in_line)

    return errors
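A quick sketch of driving these checks against playlist text, using string_to_lines from the parser module; the playlist below is a made-up example whose floating-point EXTINF should trip the version-3 rule:

from m3u8.parser import string_to_lines
from m3u8.version_matching import validate

PLAYLIST = """#EXTM3U
#EXT-X-VERSION:2
#EXT-X-TARGETDURATION:6
#EXTINF:5.220,
segment0.ts
#EXT-X-ENDLIST
"""

# EXT-X-VERSION is 2, but the fractional EXTINF duration needs version 3+,
# so validate() should return one VersionMatchingError.
for error in validate(string_to_lines(PLAYLIST)):
    print(error)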
scripts/m3u8/m3u8/version_matching_rules.py (Normal file, 108 lines)
@@ -0,0 +1,108 @@
from dataclasses import dataclass

from m3u8 import protocol


@dataclass
class VersionMatchingError(Exception):
    line_number: int
    line: str
    how_to_fix: str = "Please fix the version matching error."
    description: str = "There is a version matching error in the file."

    def __str__(self):
        return (
            "Version matching error found in the file when parsing in strict mode.\n"
            f"Line {self.line_number}: {self.description}\n"
            f"Line content: {self.line}\n"
            f"How to fix: {self.how_to_fix}"
            "\n"
        )


class VersionMatchRuleBase:
    description: str = ""
    how_to_fix: str = ""
    version: float
    line_number: int
    line: str

    def __init__(self, version: float, line_number: int, line: str) -> None:
        self.version = version
        self.line_number = line_number
        self.line = line

    def validate(self):
        raise NotImplementedError

    def get_error(self):
        return VersionMatchingError(
            line_number=self.line_number,
            line=self.line,
            description=self.description,
            how_to_fix=self.how_to_fix,
        )


class ValidIVInEXTXKEY(VersionMatchRuleBase):
    description = (
        "You must use at least protocol version 2 if you have IV in EXT-X-KEY."
    )
    how_to_fix = "Change the protocol version to 2 or higher."

    def validate(self):
        if protocol.ext_x_key not in self.line:
            return True

        if "IV" in self.line:
            return self.version >= 2

        return True


class ValidFloatingPointEXTINF(VersionMatchRuleBase):
    description = "You must use at least protocol version 3 if you have floating point EXTINF duration values."
    how_to_fix = "Change the protocol version to 3 or higher."

    def validate(self):
        if protocol.extinf not in self.line:
            return True

        chunks = self.line.replace(protocol.extinf + ":", "").split(",", 1)
        duration = chunks[0]

        def is_number(value: str):
            try:
                float(value)
                return True
            except ValueError:
                return False

        def is_floating_number(value: str):
            return is_number(value) and "." in value

        if is_floating_number(duration):
            return self.version >= 3

        return is_number(duration)


class ValidEXTXBYTERANGEOrEXTXIFRAMESONLY(VersionMatchRuleBase):
    description = "You must use at least protocol version 4 if you have EXT-X-BYTERANGE or EXT-X-I-FRAMES-ONLY."
    how_to_fix = "Change the protocol version to 4 or higher."

    def validate(self):
        if (
            protocol.ext_x_byterange not in self.line
            and protocol.ext_i_frames_only not in self.line
        ):
            return True

        return self.version >= 4


available_rules: list[type[VersionMatchRuleBase]] = [
    ValidIVInEXTXKEY,
    ValidFloatingPointEXTINF,
    ValidEXTXBYTERANGEOrEXTXIFRAMESONLY,
]
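Additional checks can be added by subclassing VersionMatchRuleBase and registering the class in available_rules; a hedged sketch (the rule below is an invented example, not part of this commit):

from m3u8 import protocol
from m3u8.version_matching_rules import VersionMatchRuleBase, available_rules


class ValidEXTXSKIP(VersionMatchRuleBase):
    # Invented example rule: EXT-X-SKIP (playlist delta updates) is only
    # defined for playlists advertising protocol version 9 or higher.
    description = "You must use at least protocol version 9 if you have EXT-X-SKIP."
    how_to_fix = "Change the protocol version to 9 or higher."

    def validate(self):
        if protocol.ext_x_skip not in self.line:
            return True
        return self.version >= 9


available_rules.append(ValidEXTXSKIP)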
scripts/m3u8/requirements-dev.txt (Normal file, 8 lines)
@@ -0,0 +1,8 @@
-r requirements.txt
bottle
pytest
# pytest-cov 2.6.0 has increased the version requirement
# for the coverage package from >=3.7.1 to >=4.4,
# which is in conflict with the version requirement
# defined by the python-coveralls package for coverage==4.0.3
pytest-cov>=2.4.0,<2.6
scripts/m3u8/requirements.txt (Normal file, 1 line)
@@ -0,0 +1 @@
backports-datetime-fromisoformat; python_version < '3.11'
scripts/m3u8/runtests (Normal file, 36 lines)
@@ -0,0 +1,36 @@
#!/bin/bash

test_server_stdout=tests/server.stdout

function install_deps {
    pip install -r requirements-dev.txt
}

function start_server {
    rm -f ${test_server_stdout}
    python tests/m3u8server.py >${test_server_stdout} 2>&1 &
}

function stop_server {
    pkill -9 -f m3u8server.py
    echo "Test server stdout on ${test_server_stdout}"
}

function run {
    PYTHONPATH=. py.test -vv --cov-report term-missing --cov m3u8 tests/
}

function main {
    install_deps
    start_server
    run
    retval=$?
    stop_server
    return "$retval"
}

if [ -z "$1" ]; then
    main
else
    "$@"
fi
scripts/m3u8/setup.py (Normal file, 28 lines)
@@ -0,0 +1,28 @@
from os.path import abspath, dirname, exists, join

from setuptools import setup

long_description = None
if exists("README.md"):
    with open("README.md") as file:
        long_description = file.read()

install_reqs = [
    req for req in open(abspath(join(dirname(__file__), "requirements.txt")))
]

setup(
    name="m3u8",
    author="Globo.com",
    version="6.0.0",
    license="MIT",
    zip_safe=False,
    include_package_data=True,
    install_requires=install_reqs,
    packages=["m3u8"],
    url="https://github.com/globocom/m3u8",
    description="Python m3u8 parser",
    long_description=long_description,
    long_description_content_type="text/markdown",
    python_requires=">=3.9",
)
@@ -109,7 +109,8 @@ def get_cdm(log, service, profile=None, cdm_name=None):
        return device
    except FileNotFoundError:
        try:
-            return Device.from_dir(os.path.join(directories.devices, cdm_name))
+            device = Device.from_dir(os.path.join(directories.devices, cdm_name))
+            return device
        except:
            pass
Binary file not shown.
@@ -134,8 +134,8 @@ class Track:

            url = None

-            if self.descriptor == self.Descriptor.M3U:
-                master = m3u8.loads(session.get(as_list(self.url)[0]).text, uri=self.url)
+            if self.descriptor == self.Descriptor.M3U and self.extra[1]:
+                master = self.extra[1]
                for segment in master.segments:
                    if not segment.init_section:
                        continue
@@ -284,16 +284,8 @@
            ) + ".mp4"
            save_path = os.path.join(out, name)

-        if self.descriptor == self.Descriptor.M3U:
-            master = m3u8.loads(
-                requests.get(
-                    as_list(self.url)[0],
-                    headers=headers,
-                    proxies={"all": proxy} if self.needs_proxy and proxy else None
-                ).text,
-                uri=as_list(self.url)[0]
-            )
-
+        if self.descriptor == self.Descriptor.M3U and self.extra[1]:
+            master = self.extra[1]
            # Keys may be [] or [None] if unencrypted
            if any(master.keys + master.session_keys):
                self.encrypted = True
@@ -34,14 +34,12 @@ def parse(master, source=None):
        raise ValueError("Tracks.from_m3u8: Expected a Variant Playlist M3U8 document...")

    # get pssh if available
-    # uses master.data.session_keys instead of master.keys as master.keys is ONLY EXT-X-KEYS and
+    # uses master.session_keys instead of master.keys as master.keys is ONLY EXT-X-KEYS and
    # doesn't include EXT-X-SESSION-KEYS which is whats used for variant playlist M3U8.
-    keys = [x.uri for x in master.session_keys if x.keyformat.lower() == "com.microsoft.playready"]
-    psshPR = keys[0].split(",")[-1] if keys else None
-
+    psshPR = [x.uri.split(",")[-1] for x in master.session_keys if x.keyformat.lower() == "com.microsoft.playready"][0]
+    psshWV = [x.uri.split(",")[-1] for x in master.session_keys if x.keyformat.lower() == "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed"][0]

-    widevine_keys = [x.uri for x in master.session_keys if x.keyformat.lower() == "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed"]
-    psshWV = widevine_keys[0].split(",")[-1] if widevine_keys else None
    # if pssh:
    #     pssh = base64.b64decode(pssh)
    #     # noinspection PyBroadException
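The rewritten one-liners above index [0] directly, so they assume the variant playlist always carries both a PlayReady and a Widevine EXT-X-SESSION-KEY. A hedged sketch of an equivalent lookup inside parse() that keeps the old None fallback (first_session_key_data is an invented helper name):

def first_session_key_data(master, keyformat):
    # Base64 payload of the first EXT-X-SESSION-KEY with a matching KEYFORMAT,
    # or None when the playlist has no such key.
    return next(
        (
            key.uri.split(",")[-1]
            for key in master.session_keys
            if key.uri and (key.keyformat or "").lower() == keyformat
        ),
        None,
    )


psshPR = first_session_key_data(master, "com.microsoft.playready")
psshWV = first_session_key_data(master, "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed")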
@@ -79,10 +77,30 @@
            descriptor=Track.Descriptor.M3U,
            # decryption
            encrypted=bool(master.keys or master.session_keys),
-            psshWV=psshWV,
-            psshPR=psshPR,
+            psshWV=[
+                session_key.uri.split(",")[-1]
+                for session_key in master.session_keys
+                if (
+                    session_key.keyformat.lower() == "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed"
+                    and
+                    source == "DSNP"
+                    and
+                    x.stream_info.characteristics.lower().replace('"', "") in session_key.characteristics.lower().split(",")
+                )
+            ][0] or psshWV,
+            psshPR=[
+                session_key.uri.split(",")[-1]
+                for session_key in master.session_keys
+                if (
+                    session_key.keyformat.lower() == "com.microsoft.playready"
+                    and
+                    source == "DSNP"
+                    and
+                    x.stream_info.characteristics.lower().replace('"', "") in session_key.characteristics.lower().split(",")
+                )
+            ][0] or psshPR,
            # extra
-            extra=x
+            extra=(x, master)
        ) for x in master.playlists],
        # AUDIO
        [AudioTrack(
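For the per-variant DSNP case above, the commit matches the variant's CHARACTERISTICS against session_key.characteristics (an attribute the custom DSNP m3u8 parser is assumed to populate) and gates on source == "DSNP". A sketch of the same matching with a graceful fallback to the playlist-wide value, using an invented helper name:

def variant_pssh(playlist, master, keyformat, fallback):
    # Prefer the EXT-X-SESSION-KEY whose CHARACTERISTICS list covers this
    # variant playlist; otherwise keep the playlist-wide PSSH value.
    wanted = (playlist.stream_info.characteristics or "").lower().replace('"', "")
    return next(
        (
            key.uri.split(",")[-1]
            for key in master.session_keys
            if key.keyformat.lower() == keyformat
            and wanted in (key.characteristics or "").lower().split(",")
        ),
        fallback,
    )


# e.g. inside the VideoTrack comprehension:
# psshWV=variant_pssh(x, master, "urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed", psshWV)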
@@ -265,7 +265,7 @@ class AppleTVPlus(BaseService):
                {
                    "challenge": base64.b64encode(challenge.encode('utf-8')).decode('utf-8'),
                    "key-system": "com.microsoft.playready",
-                    "uri": f"data:text/plain;charset=UTF-16;base64,{track.pssh}",
+                    "uri": f"data:text/plain;charset=UTF-16;base64,{track.psshPR}",
                    "id": 1,
                    "lease-action": 'start',
                    "adamId": self.extra_server_parameters['adamId'],
@@ -122,6 +122,7 @@ class DisneyPlus(BaseService):
        self.range = ctx.parent.params["range_"]
        self.wanted = ctx.parent.params["wanted"]
        self.quality = ctx.parent.params["quality"] or 1080
+        self.range = ctx.parent.params["range_"] or "SDR"

        self.playready = True if "certificate_chain" in dir(ctx.obj.cdm) else False # ctx.obj.cdm.device.type == LocalDevice.Types.PLAYREADY
@@ -130,6 +131,9 @@ class DisneyPlus(BaseService):
        self.device_token = None
        self.account_tokens = {}

+        if self.range == "DV+HDR":
+            self.scenario = "android~unlimited"
+
        self.configure()

    def get_titles(self):
@@ -581,8 +585,8 @@ class DisneyPlus(BaseService):
        return manifest["stream"]["sources"][0]['complete']['url']

    def get_manifest_tracks(self, url):
-        self.session.get(url)
-        tracks = Tracks.from_m3u8(m3u8.load(url), source=self.ALIASES[0])
+        manifest = self.session.get(url).text
+        tracks = Tracks.from_m3u8(m3u8.loads(content=manifest, uri=url), source=self.ALIASES[0])
        if self.acodec:
            tracks.audios = [
                x for x in tracks.audios if (x.codec or "").split("-")[0] in self.AUDIO_CODEC_MAP[self.acodec]
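Fetching the manifest through the authenticated session and then parsing the text keeps the playlist request on the service's own session; passing uri= preserves relative-URI resolution. A small standalone sketch, assuming the bundled copy matches upstream m3u8 here (the example.com URL is a placeholder):

import m3u8
import requests

url = "https://example.com/path/master.m3u8"  # placeholder playlist URL
manifest_text = requests.get(url).text

# uri= lets relative segment and key URIs resolve against the playlist location.
playlist = m3u8.loads(content=manifest_text, uri=url)
for segment in playlist.segments:
    print(segment.absolute_uri)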
@@ -53,7 +53,7 @@ class MoviesAnywhere(BaseService):
        super().__init__(ctx)
        self.parse_title(ctx, title)
        self.configure()

+        self.playready = True if "certificate_chain" in dir(ctx.obj.cdm) else False #ctx.obj.cdm.device.type == LocalDevice.Types.PLAYREADY
        self.atmos = ctx.parent.params["atmos"]
        self.vcodec = ctx.parent.params["vcodec"]
        self.acodec = ctx.parent.params["acodec"]
@@ -150,7 +150,6 @@ class MoviesAnywhere(BaseService):

    def get_tracks(self, title):
        player_data = self.content["data"]["page"]["components"][0]["mainAction"]["playerData"]["playable"]
-
        videos = []
        audios = []
        for cr in player_data["videoAssets"]["dash"].values():
@@ -167,7 +166,7 @@ class MoviesAnywhere(BaseService):
            pssh = self.get_pssh_init(manifest["url"])
            video_pssh = Box.parse(base64.b64decode(pssh))
            video.pssh = video_pssh
-            video.license_url = manifest["widevineLaUrl"]
+            video.license_url = manifest["playreadyLaUrl"] if self.playready else manifest["widevineLaUrl"]
            video.contentId = URL(video.license_url).params._dict["ContentId"][
                0
            ]
@@ -175,7 +174,7 @@ class MoviesAnywhere(BaseService):
            # Extract Atmos audio track if available.
            for audio in tracks.audios:
                audio.pssh = video_pssh
-                audio.license_url = manifest["widevineLaUrl"]
+                audio.license_url = manifest["playreadyLaUrl"] if self.playready else manifest["widevineLaUrl"]
                audio.contentId = URL(audio.license_url).params._dict["ContentId"][
                    0
                ]
@@ -276,7 +276,7 @@ async def m3u8dl(uri, out, track, headers=None, proxy=None):
    arguments.extend(["--http-request-timeout", "8"])
    if track.__class__.__name__ == "VideoTrack":
        from vinetrimmer.objects.tracks import Track
-        if track.height and not (track.descriptor == Track.Descriptor.M3U) and track.source != "HS":
+        if track.height and not (track.descriptor == Track.Descriptor.M3U):
            arguments.extend([
                "-sv", f"res='{track.height}*':codec='{track.codec}':for=best"
            ])
@@ -71,6 +71,8 @@ def main(debug):
    if len(sys.argv) > 1 and sys.argv[1].lower() == "dl":
        sys.argv.pop(1)

+    sys.set_int_max_str_digits(10000)  # This is necessary for str to int conversion to work
+
    dl()

if __name__ == "__main__":
@@ -21,9 +21,9 @@ cdm_api:
    security_level: 1

credentials:
-  iTunes: ''
+  iTunes: 'username:password'
  Hotstar: 'username:password'
-  DisneyPlus: 'email:password'
DisneyPlus: 'tjp4252@gmail.com:Tjcooke@121382'

directories:
  temp: ''