mirror of https://cdm-project.com/Download-Tools/udemy-downloader.git
synced 2025-04-30 00:44:24 +02:00
We can now extract the PSSH box from the initialization mp4 to get the Key ID (KID) for decryption. Also re-enabled decryption.
This commit is contained in:
parent 8d41a494c9
commit e97909ddca
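In practice the change wires three steps together: download the init segment of each track, parse its PSSH box to recover the Key ID (KID), then look the KID up in keyfile.json before decrypting. Below is a minimal sketch of that flow, assuming keyfile.json maps lowercase hex KIDs to hex keys and that Bento4's mp4decrypt is the external decryption tool; the actual decrypt() in this repository may invoke its tool differently.

# Sketch only: keyfile.json layout and the mp4decrypt call are assumptions,
# not taken verbatim from this commit.
import json
import subprocess

from utils import extract_kid  # helper added in this commit

def decrypt_sketch(encrypted_file, decrypted_file, keyfile_path="keyfile.json"):
    with open(keyfile_path) as f:
        keyfile = json.load(f)

    kid = extract_kid(encrypted_file)      # KID read from the PSSH box
    if kid is None:
        raise SystemExit("No PSSH box found in " + encrypted_file)

    try:
        key = keyfile[kid.lower()]         # same lookup as decrypt(kid, filename)
    except KeyError:
        raise SystemExit("Key not found for KID " + kid)

    # mp4decrypt (Bento4) accepts --key <kid>:<key> in hex; assumed to be on PATH
    subprocess.run(["mp4decrypt", "--key", f"{kid.lower()}:{key}",
                    encrypted_file, decrypted_file], check=True)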
@@ -6,12 +6,13 @@ from mpegdash.utils import (
     parse_attr_value, parse_child_nodes, parse_node_value,
     write_attr_value, write_child_node, write_node_value
 )
+from utils import extract_kid
 
 #global ids
 retry = 3
-download_dir = os.getcwd() # set the folder to output
+download_dir = os.getcwd() + '\out_dir' # set the folder to output
 working_dir = os.getcwd() + "\working_dir" # set the folder to download ephemeral files
-keyfile_path = download_dir + "\keyfile.json"
+keyfile_path = os.getcwd() + "\keyfile.json"
 
 if not os.path.exists(working_dir):
     os.makedirs(working_dir)
@@ -111,9 +112,9 @@ def mux_process(video_title,outfile):
     command = f"nice -n 7 ffmpeg -y -i decrypted_audio.mp4 -i decrypted_video.mp4 -acodec copy -vcodec copy -fflags +bitexact -map_metadata -1 -metadata title=\"{video_title}\" -metadata creation_time=2020-00-00T70:05:30.000000Z {outfile}.mp4"
     os.system(command)
 
-def decrypt(filename):
+def decrypt(kid,filename):
     try:
-        key = keyfile["0"]
+        key = keyfile[kid.lower()]
     except KeyError as error:
         exit("Key not found")
     if(os.name == "nt"):
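The lookup keyfile[kid.lower()] above implies that keyfile.json is a JSON object keyed by lowercase hex KID. A hypothetical sketch of that file and of the lookup, with placeholder values only:

# Hypothetical keyfile.json layout implied by keyfile[kid.lower()];
# the KID and key below are placeholders, not real values:
# {
#     "0123456789abcdef0123456789abcdef": "00112233445566778899aabbccddeeff"
# }
import json

with open("keyfile.json") as f:           # keyfile_path in main.py
    keyfile = json.load(f)

kid = "0123456789ABCDEF0123456789ABCDEF"  # as returned by extract_kid()
key = keyfile[kid.lower()]                # raises KeyError -> "Key not found"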
@@ -125,7 +126,11 @@ def decrypt(filename):
 def handle_irregular_segments(media_info,video_title,output_path):
     no_segment,video_url,video_init,video_extension,no_segment,audio_url,audio_init,audio_extension = media_info
     download_media("video_0.seg.mp4",video_init)
+    video_kid = extract_kid("video_0.seg.mp4")
+    print("KID for video file is: " + video_kid)
     download_media("audio_0.seg.mp4",audio_init)
+    audio_kid = extract_kid("audio_0.seg.mp4")
+    print("KID for audio file is: " + audio_kid)
     for count in range(1,no_segment):
         video_segment_url = video_url.replace("$Number$",str(count))
         audio_segment_url = audio_url.replace("$Number$",str(count))
@@ -142,8 +147,8 @@ def handle_irregular_segments(media_info,video_title,output_path):
         print(audio_concat_command)
         os.system(video_concat_command)
         os.system(audio_concat_command)
-        decrypt("video")
-        decrypt("audio")
+        decrypt(video_kid,"video")
+        decrypt(audio_kid,"audio")
         mux_process(video_title,output_path)
         break
 
@@ -188,11 +193,11 @@ def manifest_parser(mpd_url):
 
 
 if __name__ == "__main__":
-    mpd = "https://www.udemy.com/assets/25653992/encrypted-files/out/v1/6d2a767a83064e7fa747bedeb1841f7d/06c8dc12da2745f1b0b4e7c2c032dfef/842d4b8e2e014fbbb87c640ddc89d036/index.mpd?token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MjEzMjc0NzAsInBhdGgiOiJvdXQvdjEvNmQyYTc2N2E4MzA2NGU3ZmE3NDdiZWRlYjE4NDFmN2QvMDZjOGRjMTJkYTI3NDVmMWIwYjRlN2MyYzAzMmRmZWYvODQyZDRiOGUyZTAxNGZiYmI4N2M2NDBkZGM4OWQwMzYvIn0.4NpalcpDz0i5SXl6UYxnxECoacfRkJBHtKx5tWlJMOQ&provider=cloudfront&v=1"
+    mpd = "mpd url"
     base_url = mpd.split("index.mpd")[0]
     os.chdir(working_dir)
     media_info = manifest_parser(mpd)
-    video_title = "171. Editing Collision Meshes"
-    output_path = download_dir + "\\171. Editing Collision Meshes"
+    video_title = "175. Inverse Transforming Vectors" # the video title that gets embeded into the mp4 file metadata
+    output_path = download_dir + "\\175. Inverse Transforming Vectors" # video title used in the filename, dont append .mp4
     handle_irregular_segments(media_info,video_title,output_path)
     cleanup(working_dir)
mp4parse.py (new file, 497 lines)
@@ -0,0 +1,497 @@
""" MP4 Parser based on:
http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf

@author: Alastair McCormack
@license: MIT License

"""

import bitstring
from datetime import datetime
from collections import namedtuple
import logging
import six

log = logging.getLogger(__name__)
#log.addHandler(logging.NullHandler())
log.setLevel(logging.WARN)

class MixinDictRepr(object):
    def __repr__(self, *args, **kwargs):
        return "{class_name} : {content!r} ".format(class_name=self.__class__.__name__,
                                                    content=self.__dict__)

class MixinMinimalRepr(object):
    """ A minimal representaion when the payload could be large """

    def __repr__(self, *args, **kwargs):
        return "{class_name} : {content!r} ".format(class_name=self.__class__.__name__,
                                                    content=self.__dict__.keys())

class FragmentRunTableBox(MixinDictRepr):
    pass


class UnImplementedBox(MixinDictRepr):
    type = "na"
    pass


class MovieFragmentBox(MixinDictRepr):
    type = "moof"

class MovieBox(MixinDictRepr):
    type = "moov"

class BootStrapInfoBox(MixinDictRepr):
    type = "abst"

    @property
    def current_media_time(self):
        return self._current_media_time

    @current_media_time.setter
    def current_media_time(self, epoch_timestamp):
        """ Takes a timestamp arg and saves it as datetime """
        self._current_media_time = datetime.utcfromtimestamp(epoch_timestamp/float(self.time_scale))

class FragmentRandomAccessBox(MixinDictRepr):
    """ aka afra """
    type = "afra"

    FragmentRandomAccessBoxEntry = namedtuple("FragmentRandomAccessBoxEntry", ["time", "offset"])
    FragmentRandomAccessBoxGlobalEntry = namedtuple("FragmentRandomAccessBoxGlobalEntry", ["time", "segment_number", "fragment_number", "afra_offset", "sample_offset"])

    pass


class SegmentRunTable(MixinDictRepr):
    type = "asrt"

    SegmentRunTableEntry = namedtuple('SegmentRunTableEntry', ["first_segment", "fragments_per_segment"])
    pass

class FragmentRunTable(MixinDictRepr):
    type = "afrt"

    class FragmentRunTableEntry( namedtuple('FragmentRunTableEntry',
                                            ["first_fragment",
                                             "first_fragment_timestamp",
                                             "fragment_duration",
                                             "discontinuity_indicator"]) ):

        DI_END_OF_PRESENTATION = 0
        DI_NUMBERING = 1
        DI_TIMESTAMP = 2
        DI_TIMESTAMP_AND_NUMBER = 3

        def __eq__(self, other):
            if self.first_fragment == other.first_fragment and \
                self.first_fragment_timestamp == other.first_fragment_timestamp and \
                self.fragment_duration == other.fragment_duration and \
                self.discontinuity_indicator == other.discontinuity_indicator:
                return True


    def __repr__(self, *args, **kwargs):
        return str(self.__dict__)

class MediaDataBox(MixinMinimalRepr):
    """ aka mdat """
    type = "mdat"

class MovieFragmentHeader(MixinDictRepr):
    type = "mfhd"

class ProtectionSystemSpecificHeader(MixinDictRepr):
    type = "pssh"

BoxHeader = namedtuple( "BoxHeader", ["box_size", "box_type", "header_size"] )


class F4VParser(object):

    @classmethod
    def parse(cls, filename=None, bytes_input=None, file_input=None, offset_bytes=0, headers_only=False):
        """
        Parse an MP4 file or bytes into boxes

        :param filename: filename of mp4 file.
        :type filename: str.
        :param bytes_input: bytes of mp4 file.
        :type bytes_input: bytes / Python 2.x str.
        :param offset_bytes: start parsing at offset.
        :type offset_bytes: int.
        :param headers_only: Ignore data and return just headers. Useful when data is cut short
        :type: headers_only: boolean
        :return: BMFF Boxes or Headers

        """

        box_lookup = {
            BootStrapInfoBox.type: cls._parse_abst,
            FragmentRandomAccessBox.type: cls._parse_afra,
            MediaDataBox.type: cls._parse_mdat,
            MovieFragmentBox.type: cls._parse_moof,
            MovieBox.type: cls._parse_moov,
            MovieFragmentHeader.type: cls._parse_mfhd,
            ProtectionSystemSpecificHeader.type: cls._parse_pssh
        }

        if filename:
            bs = bitstring.ConstBitStream(filename=filename, offset=offset_bytes * 8)
        elif bytes_input:
            bs = bitstring.ConstBitStream(bytes=bytes_input, offset=offset_bytes * 8)
        else:
            bs = bitstring.ConstBitStream(auto=file_input, offset=offset_bytes * 8)

        log.debug("Starting parse")
        log.debug("Size is %d bits", bs.len)

        while bs.pos < bs.len:
            log.debug("Byte pos before header: %d relative to (%d)", bs.bytepos, offset_bytes)
            log.debug("Reading header")
            try:
                header = cls._read_box_header(bs)
            except bitstring.ReadError as e:
                log.error("Premature end of data while reading box header")
                raise

            log.debug("Header type: %s", header.box_type)
            log.debug("Byte pos after header: %d relative to (%d)", bs.bytepos, offset_bytes)

            if headers_only:
                yield header

                # move pointer to next header if possible
                try:
                    bs.bytepos += header.box_size
                except ValueError:
                    log.warning("Premature end of data")
                    raise
            else:
                # Get parser method for header type
                parse_function = box_lookup.get(header.box_type, cls._parse_unimplemented)
                try:
                    yield parse_function(bs, header)
                except ValueError as e:
                    log.error("Premature end of data")
                    raise

    @classmethod
    def _is_mp4(cls, parser):
        try:
            for box in parser:
                return True
        except ValueError:
            return False

    @classmethod
    def is_mp4_s(cls, bytes_input):
        """ Is bytes_input the contents of an MP4 file

        :param bytes_input: str/bytes to check.
        :type bytes_input: str/bytes.
        :return:
        """

        parser = cls.parse(bytes_input=bytes_input, headers_only=True)
        return cls._is_mp4(parser)

    @classmethod
    def is_mp4(cls, file_input):
        """ Checks input if it's an MP4 file

        :param input: Filename or file object
        :type input: str, file
        :param state: Current state to be in.
        :type state: bool.
        :returns: bool.
        :raises: AttributeError, KeyError
        """

        if hasattr(file_input, "read"):
            parser = cls.parse(file_input=file_input, headers_only=True)
        else:
            parser = cls.parse(filename=file_input, headers_only=True)
        return cls._is_mp4(parser)

    @staticmethod
    def _read_string(bs):
        """ read UTF8 null terminated string """
        result = bs.readto('0x00', bytealigned=True).bytes.decode("utf-8")[:-1]
        return result if result else None

    @classmethod
    def _read_count_and_string_table(cls, bs):
        """ Read a count then return the strings in a list """
        result = []
        entry_count = bs.read("uint:8")
        for _ in six.range(0, entry_count):
            result.append( cls._read_string(bs) )
        return result

    @staticmethod
    def _read_box_header(bs):
        header_start_pos = bs.bytepos
        size, box_type = bs.readlist("uint:32, bytes:4")

        # box_type should be an ASCII string. Decode as UTF-8 in case
        try:
            box_type = box_type.decode('utf-8')
        except UnicodeDecodeError:
            # we'll leave as bytes instead
            pass

        # if size == 1, then this is an extended size type.
        # Therefore read the next 64 bits as size
        if size == 1:
            size = bs.read("uint:64")
        header_end_pos = bs.bytepos
        header_size = header_end_pos - header_start_pos

        return BoxHeader(box_size=size-header_size, box_type=box_type, header_size=header_size)

    @staticmethod
    def _parse_unimplemented(bs, header):
        ui = UnImplementedBox()
        ui.header = header

        bs.bytepos += header.box_size

        return ui

    @classmethod
    def _parse_afra(cls, bs, header):

        afra = FragmentRandomAccessBox()
        afra.header = header

        # read the entire box in case there's padding
        afra_bs = bs.read(header.box_size * 8)
        # skip Version and Flags
        afra_bs.pos += 8 + 24
        long_ids, long_offsets, global_entries, afra.time_scale, local_entry_count = \
            afra_bs.readlist("bool, bool, bool, pad:5, uint:32, uint:32")

        if long_ids:
            id_bs_type = "uint:32"
        else:
            id_bs_type = "uint:16"

        if long_offsets:
            offset_bs_type = "uint:64"
        else:
            offset_bs_type = "uint:32"

        log.debug("local_access_entries entry count: %s", local_entry_count)
        afra.local_access_entries = []
        for _ in six.range(0, local_entry_count):
            time = cls._parse_time_field(afra_bs, afra.time_scale)

            offset = afra_bs.read(offset_bs_type)

            afra_entry = \
                FragmentRandomAccessBox.FragmentRandomAccessBoxEntry(time=time,
                                                                     offset=offset)
            afra.local_access_entries.append(afra_entry)

        afra.global_access_entries = []

        if global_entries:
            global_entry_count = afra_bs.read("uint:32")

            log.debug("global_access_entries entry count: %s", global_entry_count)

            for _ in six.range(0, global_entry_count):
                time = cls._parse_time_field(afra_bs, afra.time_scale)

                segment_number = afra_bs.read(id_bs_type)
                fragment_number = afra_bs.read(id_bs_type)

                afra_offset = afra_bs.read(offset_bs_type)
                sample_offset = afra_bs.read(offset_bs_type)

                afra_global_entry = \
                    FragmentRandomAccessBox.FragmentRandomAccessBoxGlobalEntry(
                        time=time,
                        segment_number=segment_number,
                        fragment_number=fragment_number,
                        afra_offset=afra_offset,
                        sample_offset=sample_offset)

                afra.global_access_entries.append(afra_global_entry)

        return afra

    @classmethod
    def _parse_moof(cls, bootstrap_bs, header):
        moof = MovieFragmentBox()
        moof.header = header

        box_bs = bootstrap_bs.read(moof.header.box_size * 8)

        for child_box in cls.parse(bytes_input=box_bs.bytes):
            setattr(moof, child_box.type, child_box)

        return moof

    @classmethod
    def _parse_moov(cls, bootstrap_bs, header):
        moov = MovieBox()
        moov.header = header
        psshs = []

        box_bs = bootstrap_bs.read(moov.header.box_size * 8)

        for child_box in cls.parse(bytes_input=box_bs.bytes):
            if(child_box.type == "pssh"):
                psshs.append(child_box)
            else:
                setattr(moov, child_box.type, child_box)
        setattr(moov, "pssh", psshs)

        return moov

    @classmethod
    def _parse_mfhd(cls, bootstrap_bs, header):
        mfhd = MovieFragmentHeader()
        mfhd.header = header

        box_bs = bootstrap_bs.read(mfhd.header.box_size * 8)
        return mfhd

    @staticmethod
    def _parse_pssh(bootstrap_bs, header):
        pssh = ProtectionSystemSpecificHeader()
        pssh.header = header

        box_bs = bootstrap_bs.read(pssh.header.box_size*8)
        # Payload appears to be 8 bytes in.
        data = box_bs.hex[8:]
        pssh.system_id = data[:32]
        pssh.payload = data[40:]
        return pssh

    @classmethod
    def _parse_abst(cls, bootstrap_bs, header):

        abst = BootStrapInfoBox()
        abst.header = header

        box_bs = bootstrap_bs.read(abst.header.box_size * 8)

        abst.version, abst.profile_raw, abst.live, abst.update, \
            abst.time_scale, abst.current_media_time, abst.smpte_timecode_offset = \
            box_bs.readlist("""pad:8, pad:24, uint:32, uint:2, bool, bool,
                            pad:4,
                            uint:32, uint:64, uint:64""")
        abst.movie_identifier = cls._read_string(box_bs)

        abst.server_entry_table = cls._read_count_and_string_table(box_bs)
        abst.quality_entry_table = cls._read_count_and_string_table(box_bs)

        abst.drm_data = cls._read_string(box_bs)
        abst.meta_data = cls._read_string(box_bs)

        abst.segment_run_tables = []

        segment_count = box_bs.read("uint:8")
        log.debug("segment_count: %d" % segment_count)
        for _ in six.range(0, segment_count):
            abst.segment_run_tables.append( cls._parse_asrt(box_bs) )

        abst.fragment_tables = []
        fragment_count = box_bs.read("uint:8")
        log.debug("fragment_count: %d" % fragment_count)
        for _ in xrange(0, fragment_count):
            abst.fragment_tables.append( cls._parse_afrt(box_bs) )

        log.debug("Finished parsing abst")

        return abst

    @classmethod
    def _parse_asrt(cls, box_bs):
        """ Parse asrt / Segment Run Table Box """

        asrt = SegmentRunTable()
        asrt.header = cls._read_box_header(box_bs)
        # read the entire box in case there's padding
        asrt_bs_box = box_bs.read(asrt.header.box_size * 8)

        asrt_bs_box.pos += 8
        update_flag = asrt_bs_box.read("uint:24")
        asrt.update = True if update_flag == 1 else False

        asrt.quality_segment_url_modifiers = cls._read_count_and_string_table(asrt_bs_box)

        asrt.segment_run_table_entries = []
        segment_count = asrt_bs_box.read("uint:32")

        for _ in six.range(0, segment_count):
            first_segment = asrt_bs_box.read("uint:32")
            fragments_per_segment = asrt_bs_box.read("uint:32")
            asrt.segment_run_table_entries.append(
                SegmentRunTable.SegmentRunTableEntry(first_segment=first_segment,
                                                     fragments_per_segment=fragments_per_segment) )
        return asrt

    @classmethod
    def _parse_afrt(cls, box_bs):
        """ Parse afrt / Fragment Run Table Box """

        afrt = FragmentRunTable()
        afrt.header = cls._read_box_header(box_bs)
        # read the entire box in case there's padding
        afrt_bs_box = box_bs.read(afrt.header.box_size * 8)

        afrt_bs_box.pos += 8
        update_flag = afrt_bs_box.read("uint:24")
        afrt.update = True if update_flag == 1 else False

        afrt.time_scale = afrt_bs_box.read("uint:32")
        afrt.quality_fragment_url_modifiers = cls._read_count_and_string_table(afrt_bs_box)

        fragment_count = afrt_bs_box.read("uint:32")

        afrt.fragments = []

        for _ in six.range(0, fragment_count):
            first_fragment = afrt_bs_box.read("uint:32")
            first_fragment_timestamp_raw = afrt_bs_box.read("uint:64")

            try:
                first_fragment_timestamp = datetime.utcfromtimestamp(first_fragment_timestamp_raw/float(afrt.time_scale))
            except ValueError:
                # Elemental sometimes create odd timestamps
                first_fragment_timestamp = None

            fragment_duration = afrt_bs_box.read("uint:32")

            if fragment_duration == 0:
                discontinuity_indicator = afrt_bs_box.read("uint:8")
            else:
                discontinuity_indicator = None

            frte = FragmentRunTable.FragmentRunTableEntry(first_fragment=first_fragment,
                                                          first_fragment_timestamp=first_fragment_timestamp,
                                                          fragment_duration=fragment_duration,
                                                          discontinuity_indicator=discontinuity_indicator)
            afrt.fragments.append(frte)
        return afrt

    @staticmethod
    def _parse_mdat(box_bs, header):
        """ Parse afrt / Fragment Run Table Box """

        mdat = MediaDataBox()
        mdat.header = header
        mdat.payload = box_bs.read(mdat.header.box_size * 8).bytes
        return mdat

    @staticmethod
    def _parse_time_field(bs, scale):
        timestamp = bs.read("uint:64")
        return datetime.utcfromtimestamp(timestamp / float(scale) )
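For reference, a small sketch of driving the parser directly, listing the top-level box headers of a file; the filename is only an example:

# Sketch: list top-level box headers of an MP4 segment with the new parser.
import mp4parse

for header in mp4parse.F4VParser.parse(filename="video_0.seg.mp4", headers_only=True):
    # header is a BoxHeader namedtuple: (box_size, box_type, header_size)
    print(header.box_type, header.box_size)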
utils.py (new file, 32 lines)
@@ -0,0 +1,32 @@
import mp4parse
import codecs
import widevine_pssh_pb2
import base64

def extract_kid(mp4_file):
    """
    Parameters
    ----------
    mp4_file : str
        MP4 file with a PSSH header


    Returns
    -------
    String

    """

    boxes = mp4parse.F4VParser.parse(filename=mp4_file)
    for box in boxes:
        if box.header.box_type == 'moov':
            pssh_box = next(x for x in box.pssh if x.system_id == "edef8ba979d64acea3c827dcd51d21ed")
            hex = codecs.decode(pssh_box.payload, "hex")

            pssh = widevine_pssh_pb2.WidevinePsshData()
            pssh.ParseFromString(hex)
            content_id = base64.b16encode(pssh.content_id)
            return content_id.decode("utf-8")

    # No Moof or PSSH header found
    return None
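A minimal usage sketch of the new helper; the segment name is just an example of an init segment downloaded earlier in main.py:

# Sketch: recover the Widevine KID from a downloaded init segment.
from utils import extract_kid

kid = extract_kid("video_0.seg.mp4")
if kid is None:
    print("No Widevine PSSH box found in the init segment")
else:
    print("KID:", kid.lower())  # hex string used as the lookup key in keyfile.json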
widevine_pssh_pb2.py (new file, 141 lines)
@@ -0,0 +1,141 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: widevine_pssh.proto

import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor.FileDescriptor(
  name='widevine_pssh.proto',
  package='',
  serialized_pb=_b('\n\x13widevine_pssh.proto\"\xfc\x01\n\x10WidevinePsshData\x12.\n\talgorithm\x18\x01 \x01(\x0e\x32\x1b.WidevinePsshData.Algorithm\x12\x0e\n\x06key_id\x18\x02 \x03(\x0c\x12\x10\n\x08provider\x18\x03 \x01(\t\x12\x12\n\ncontent_id\x18\x04 \x01(\x0c\x12\x12\n\ntrack_type\x18\x05 \x01(\t\x12\x0e\n\x06policy\x18\x06 \x01(\t\x12\x1b\n\x13\x63rypto_period_index\x18\x07 \x01(\r\x12\x17\n\x0fgrouped_license\x18\x08 \x01(\x0c\"(\n\tAlgorithm\x12\x0f\n\x0bUNENCRYPTED\x10\x00\x12\n\n\x06\x41\x45SCTR\x10\x01')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)



_WIDEVINEPSSHDATA_ALGORITHM = _descriptor.EnumDescriptor(
  name='Algorithm',
  full_name='WidevinePsshData.Algorithm',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNENCRYPTED', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='AESCTR', index=1, number=1,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=236,
  serialized_end=276,
)
_sym_db.RegisterEnumDescriptor(_WIDEVINEPSSHDATA_ALGORITHM)


_WIDEVINEPSSHDATA = _descriptor.Descriptor(
  name='WidevinePsshData',
  full_name='WidevinePsshData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='algorithm', full_name='WidevinePsshData.algorithm', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='key_id', full_name='WidevinePsshData.key_id', index=1,
      number=2, type=12, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='provider', full_name='WidevinePsshData.provider', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='content_id', full_name='WidevinePsshData.content_id', index=3,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='track_type', full_name='WidevinePsshData.track_type', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='policy', full_name='WidevinePsshData.policy', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crypto_period_index', full_name='WidevinePsshData.crypto_period_index', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='grouped_license', full_name='WidevinePsshData.grouped_license', index=7,
      number=8, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _WIDEVINEPSSHDATA_ALGORITHM,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=24,
  serialized_end=276,
)

_WIDEVINEPSSHDATA.fields_by_name['algorithm'].enum_type = _WIDEVINEPSSHDATA_ALGORITHM
_WIDEVINEPSSHDATA_ALGORITHM.containing_type = _WIDEVINEPSSHDATA
DESCRIPTOR.message_types_by_name['WidevinePsshData'] = _WIDEVINEPSSHDATA

WidevinePsshData = _reflection.GeneratedProtocolMessageType('WidevinePsshData', (_message.Message,), dict(
  DESCRIPTOR = _WIDEVINEPSSHDATA,
  __module__ = 'widevine_pssh_pb2'
  # @@protoc_insertion_point(class_scope:WidevinePsshData)
  ))
_sym_db.RegisterMessage(WidevinePsshData)


# @@protoc_insertion_point(module_scope)