Skip to content

Instantly share code, notes, and snippets.

@sannies
Last active October 29, 2016 14:14
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save sannies/4125325 to your computer and use it in GitHub Desktop.
Save sannies/4125325 to your computer and use it in GitHub Desktop.
extracts keyIds from uvu files, extracts keys from a CSV file, matches the keys to form a json file that can be imported in castLabs' DSP
import binascii
import json
import struct
import os
import re
# check versioning
__author__ = 'sannies'
def get_keyid_type_mapping(source, size, key_id_mapping=None, trace='', apid=None):
    """Walk ``size`` bytes of an ISO-BMFF box stream from ``source``.

    Recursively (via _read_box) collects a mapping of hex key-id ->
    track type ('audio' / 'video' / 'audio-video') and the APID string
    found in an 'ainf' box.  Parsing stops at end of stream or at the
    first atom listed in __stop_atoms.

    :param source: binary file-like object positioned at a box header
    :param size: number of bytes to consume from ``source``
    :param key_id_mapping: dict to extend; a new one is created if None
    :param trace: '/'-joined path of enclosing box types (for type detection)
    :param apid: APID seen so far, passed through recursion
    :return: (key_id_mapping, apid) tuple
    """
    # `is None` (not truthiness): a caller-supplied empty dict must keep
    # being the dict we fill, so the caller sees the entries.
    if key_id_mapping is None:
        key_id_mapping = dict()
    read = 0
    while read < size:
        header = source.read(4)
        if len(header) < 4:
            # Clean EOF between boxes.  (file.read never raises EOFError,
            # so the old `except EOFError` handler was dead code.)
            return key_id_mapping, apid
        box_size = struct.unpack_from(">I", header)[0]
        box_type = source.read(4)
        if box_size == 0:
            raise NotImplementedError('size 0 means until the end of the file and this is not yet supported')
        elif box_size == 1:
            raise NotImplementedError('size 1 means large box. Not supported yet.')
        elif box_type in __stop_atoms:
            return key_id_mapping, apid
        else:
            key_id_mapping, apid = _read_box(source, box_size, box_type, key_id_mapping,
                                             "%s/%s" % (trace, box_type), apid)
        read += box_size
    return key_id_mapping, apid
# Plain container boxes: child boxes start right after the 8-byte box header.
__simple_container = ['moov', 'trak', 'mdia', 'minf', 'stbl', 'sinf', 'schi']
# Full box with 4 extra bytes (version/flags + entry_count) before the children.
__fullbox_plus4_container = ['stsd']
# Full boxes (version/flags only) whose children follow directly; none needed yet.
__fullbox_container = []
# Sample-entry atoms; _read_box skips their fixed fields (28 audio / 78 video
# bytes) before recursing into the nested boxes.
__audio_atoms = ['enca', 'mp4a', ]
__video_atoms = ['encv', 'h264', 'mp4v', 'avc1', ]
# Parsing stops at these atoms ('moof' marks the start of fragment data).
__stop_atoms = ['moof']
def _read_box(source, size, type, key_id_mapping, trace, apid):
    """Consume exactly one box of ``size`` bytes (header already read).

    Containers recurse via get_keyid_type_mapping; a 'tenc' box records
    its default key id, an 'ainf' box yields the APID string; anything
    else is skipped wholesale.

    :return: the updated (key_id_mapping, apid) tuple
    """
    if type in __simple_container:
        # Children begin right after the 8-byte header.
        key_id_mapping, apid = get_keyid_type_mapping(source, size - 8, key_id_mapping, trace, apid)
    elif type in __fullbox_container:
        source.read(4)  # version + flags
        key_id_mapping, apid = get_keyid_type_mapping(source, size - 12, key_id_mapping, trace, apid)
    elif type in __fullbox_plus4_container:
        source.read(8)  # version + flags + entry_count (stsd)
        key_id_mapping, apid = get_keyid_type_mapping(source, size - 16, key_id_mapping, trace, apid)
    elif type in __audio_atoms:
        source.read(28)  # fixed AudioSampleEntry fields before nested boxes
        key_id_mapping, apid = get_keyid_type_mapping(source, size - 36, key_id_mapping, trace, apid)
    elif type in __video_atoms:
        source.read(78)  # fixed VisualSampleEntry fields before nested boxes
        key_id_mapping, apid = get_keyid_type_mapping(source, size - 86, key_id_mapping, trace, apid)
    elif type == 'ainf':
        source.read(8)  # version flags & profile
        # NUL-terminated APID string.  NOTE(review): assumes the string
        # fills the rest of the box; trailing bytes would desync -- confirm.
        apid = ""
        data = source.read(1)
        while data != '\0':
            apid += data
            data = source.read(1)
    elif type == 'tenc':
        source.read(8)  # version/flags + default algorithm/IV-size fields
        keyid = source.read(16)
        # Hoisted: the hex form was computed three times before.
        hex_keyid = binascii.b2a_hex(keyid).upper()
        # Infer track type from the enclosing sample-entry atom in `trace`;
        # audio wins when both somehow match (same order as the old loops).
        if any(a in trace for a in __audio_atoms):
            track_type = 'audio'
        elif any(v in trace for v in __video_atoms):
            track_type = 'video'
        else:
            track_type = None
        # `in` replaces dict.has_key (deprecated in Py2, removed in Py3).
        if hex_keyid in key_id_mapping:
            # Same key id seen on another track: mark it as shared.
            key_id_mapping[hex_keyid] = 'audio-video'
        else:
            key_id_mapping[hex_keyid] = track_type
    else:
        # Unknown box: skip its whole payload.
        source.read(size - 8)
    return key_id_mapping, apid
# --- Phase 1: load key-id/key pairs from every *.csv in the working dir ---
files = os.listdir(os.getcwd())
csvs = (i for i in files if i.endswith('.csv'))
uvus = (i for i in files if i.endswith('.uvu'))

# 32 hex chars (key id), one separator char, 32 hex chars (key value).
_key_pair_re = re.compile(r'([0-9a-fA-F]{32}).([0-9a-fA-F]{32})')

keys = dict()
num_keys = 0  # initialized up front so the summary works with zero CSVs
for csv in csvs:
    with open(os.path.join(os.getcwd(), csv)) as csv_fptr:
        content = csv_fptr.read()
    # Scan once (the original ran finditer AND findall over the same text).
    pairs = [(kid.upper(), key) for kid, key in _key_pair_re.findall(content)]
    # update(), not rebind: the original `keys = dict(...)` silently threw
    # away the keys of every CSV except the last one processed.
    keys.update(pairs)
    num_keys += len(pairs)
    if len(keys) != num_keys:
        print("KeyID not unique in Excel sheet" * 30)
print("%d keys found. Is it plausible?" % num_keys)
# --- Phase 2: parse every *.uvu, match key ids to keys, emit JSON ---
objects = []
for uvu in uvus:
    uvu_path = os.path.join(os.getcwd(), uvu)
    # 'rb': .uvu is binary; identical to 'r' on Py2/Unix but correct on
    # Windows and required under Py3 (struct.unpack needs bytes).
    with open(uvu_path, 'rb') as uvu_fptr:
        # `kid_types` instead of `map` -- don't shadow the builtin.
        kid_types, apid = get_keyid_type_mapping(uvu_fptr, os.path.getsize(uvu_path))
    # %s-format instead of `+` so a missing 'ainf' (apid is None) does not
    # raise TypeError on string concatenation.
    print("%s %s" % (kid_types, apid))
    # Snapshot the key ids first: the pops below mutate the dict, which is
    # only safe to combine with iteration because Py2 keys() was a list.
    cek = [{'key_id': key_id,
            'key_type': kid_types.pop(key_id.upper(), "XXXXX"),
            'key': keys.pop(key_id.upper(), "XXXXX")}
           for key_id in list(kid_types.keys())]
    objects.append({'apid': apid, 'cek': cek})
print(json.dumps(objects))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment