Last active
August 4, 2020 20:20
-
-
Save brokenwindupdoll/f77f94cc39cc8bf4b836470df221c661 to your computer and use it in GitHub Desktop.
A rough script to wrap Adobe HyperDrive apps into pkgs for easier use in Munki
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/python
# A rough script to wrap Adobe HyperDrive apps into pkgs for easier use in Munki.
#
# Requires at least AUSST 4. You must sync updates to a locally accessible file system for this script to work.
#
# Does not import into Munki by itself. You will need to manually copy the plist and DMG files into a repository.
#
# The package installers that are created contain an optional "install" item to remove previous versions.
# The munkiRemoveOlderVersions switch will create an option override to run this on install.
#
# The package installers do not contain the common Adobe support files (the CC app, AAM, HDInstaller, etc). If you
# wish to use these apps to install on a computer that does not have Adobe apps, you will need to use the Creative
# Cloud Packager to create a "base" package that contains nothing but the default-selected "Creative Cloud" option,
# then set that as a Munki Required package in the configuration variables below. If you are going to use these
# packages as nothing more than updates for already CCP-packaged apps, then there is nothing extra you need to do.
### Configuration variables. Set to what works best for your environment.
# Path to the ACC folder that is synced by AUSST.
adobe_ACC_path = '/Volumes/Adobe/UpdateServer/ACC'
# Folder to output installer packages (DMGs and Munki plists) into.
output_path = "/temp/Adobe CC apps"
# Filter of Adobe SAP codes to build. Leave as an empty list to pull all updates.
products_to_build = []
# Only build the latest version of applications.
latest_only = False
# Extra packages to add to the Munki Requires tag. Leave as an empty list if not using.
munkiRequired = ["Adobe CC Base"]
# Developer tag in Munki.
munkiVendor = "Adobe"
# The folder location you plan to put the files on Munki. Leave as a blank string if going in the root.
munkiAppFolder = "Applications/Adobe"
# Post-install script lines. "#!/bin/bash" is added to the top automatically, so don't include it.
munkipostinstall = None
# Causes Munki to add a custom install option to remove older versions of an app.
munkiRemoveOlderVersions = True
### Imports | |
import os | |
import subprocess | |
import shutil | |
import re | |
import unicodedata | |
import json | |
import zipfile | |
from distutils.version import LooseVersion | |
try: | |
import xml.etree.cElementTree as ET | |
except ImportError: | |
import xml.etree.ElementTree as ET | |
import Foundation as NS | |
### Name formatting functions. Edit as you want them to be. | |
def format_appname(name):
    """Normalize an Adobe catalog display name into a short app name.

    Keeps only Unicode letters and spaces, trims off " CC" and anything
    after it, and drops the leading "Adobe " vendor prefix. Edit to taste.
    """
    keep = set(('Lu', 'Ll', 'Zs'))
    letters_only = u''.join(c for c in unicode(name) if unicodedata.category(c) in keep)
    trimmed = re.sub(" CC.*", "", letters_only.strip())
    return trimmed.replace("Adobe ", "")
def format_displayname(name):
    """Build the human-facing name: "Adobe " prefix, parentheses removed."""
    stripped = name.replace("(", "").replace(")", "")
    return "Adobe " + stripped
def format_pkgid(name, version):
    """Build a lowercase reverse-DNS package id from an app name and version."""
    compact = name.replace(" ", "")
    return ("com.adobe." + compact + "." + version).lower()
def format_munkiname(name):
    """Return the Munki item name for an app (spaces kept by preference)."""
    return "Adobe " + name
### Scripts as strings | |
# postinstall for the app pkg: runs Adobe's HDCore Setup against the
# Driver.xml that copy_appsetup() writes next to it in the scripts dir.
_adobeHDInstaller = """\
#!/bin/bash
# Will silently install Adobe HD installers from an XML manifest.
WD="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$WD"
"/Applications/Utilities/Adobe Creative Cloud/HDCore/Setup" --install=1 --driverXML="./Driver.xml"
exit $?"""
# Uninstall fragment for HyperDrive-era previous versions. The adobeSAP /
# adobeVer / adobePlat placeholders are substituted in make_appsetup();
# adobeVer may expand to several space-separated versions, hence the loop.
# No shebang: this is appended to a postinstall that already has one.
_adobeHDUninstall = """\
# The new Hyper Drive installer has a much nicer uninstall method.
SAP="adobeSAP"
baseVer="adobeVer"
platform="adobePlat"
for aVer in $baseVer
do
    "/Applications/Utilities/Adobe Creative Cloud/HDCore/Setup" --uninstall=1 --sapCode=$SAP --baseVersion=$aVer --platform=$platform --deleteUserPreferences=true
done"""
# Uninstall fragment for older RIBS-era versions. adobeRIB is substituted
# with space-separated product codes; each code gets a temporary
# deployment XML fed to the legacy PDApp Setup binary.
_adobeRIBUninstall = """\
adobeCode="adobeRIB"
# This is the boilerplate XML to automate the setup app.
remove='<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<Deployment>
    <Properties>
        <Property name="edtWorkflow">1</Property>
        <Property name="mediaSignature">adobeGUID</Property>
        <Property name="uninstall">1</Property>
    </Properties>
    <Payloads>
        <Payload adobeCode="adobeGUID">
            <Action>remove</Action>
        </Payload>
    </Payloads>
</Deployment>'
for aCode in $adobeCode
do
    if [ -e "/Library/Application Support/Adobe/Uninstall/$aCode.db" ]; then
        TempDir=`mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir'`
        echo `echo $remove | sed "s/adobeGUID/$aCode/g"` > $TempDir/remove.xml
        "/Library/Application Support/Adobe/OOBE/PDApp/DECore/Setup.app/Contents/MacOS/Setup" --deploymentFile="$TempDir/remove.xml" --com.Adobe.PDApp.ignore_aam --skipProcessChecks=1
        rm -rf $TempDir
    fi
done"""
# Standalone uninstall script embedded in the Munki pkginfo
# (uninstall_method=uninstall_script); same placeholder substitution as
# above but for a single base version, so no loop is needed.
_munkiUninstall = """\
#!/bin/bash
# The new Hyper Drive installer has a much nicer uninstall method.
SAP="adobeSAP"
baseVer="adobeVer"
platform="adobePlat"
"/Applications/Utilities/Adobe Creative Cloud/HDCore/Setup" --uninstall=1 --sapCode=$SAP --baseVersion=$baseVer --platform=$platform --deleteUserPreferences=true
exit $?"""
### Functions and classes | |
def main():
    """Read the AUSST catalog, select the configured products, build packages."""
    catalog = os.path.join(adobe_ACC_path, "services/ffc/products/ffc.xml")
    all_apps = import_xmldata(catalog)
    wanted = filter_apps(all_apps, filters=products_to_build, latest_only=latest_only)
    build_packages(wanted, output_path=output_path)
class AdobeApp(object):
    """Plain data holder for one Adobe HyperDrive product entry.

    Instances are populated field-by-field while parsing ffc.xml in
    import_xmldata(); every attribute defaults to None so a
    partially-filled object is easy to spot.
    """

    def __init__(self):
        self.id = None                # Adobe SAP code (e.g. "PHSP"), upper-cased
        self.name = None              # short name from format_appname()
        self.displayName = None       # human-facing name from format_displayname()
        self.version = None           # product version string from the catalog
        self.platform = None          # catalog platform id (an "osx*" value)
        self.baseVersion = None       # baseVersion attribute of the languageSet
        self.minOS = None             # minimum supported OS version, if listed
        self.maxOS = None             # maximum supported OS version, if listed
        self.dependencies = None      # SAP->baseVersion dict, later resolved to AdobeApp list
        self.references = None        # SAP->version dict, later resolved to AdobeApp list
        self.apptype = None           # "app" (ccm channel) or "sup" (anything else)
        self.previousVersions = None  # list of dicts describing older releases

    def __repr__(self):
        # Compact debugging representation showing only the identifying fields.
        return "AdobeApp(id=%r, version=%r, platform=%r)" % (self.id, self.version, self.platform)
def import_xmldata(xmlpath):
    # Parse the AUSST-synced ffc.xml catalog at xmlpath and return a list of
    # AdobeApp objects, one per OSX HyperDrive languageSet found. Raises if
    # the catalog cannot be opened or parsed.
    print "Reading XML data ..."
    try:
        ffc_xml = ET.ElementTree(file=xmlpath)
    except Exception as err:
        print ("Could not open Adobe XML data file at: " + xmlpath)
        raise err
    xml_applist = []
    # The extra step of looking at the channel instead of just XPath-ing to products is so we can filter on it later
    for channel in ffc_xml.getroot().findall("./channel"):
        for product in channel.findall("./products/product"):
            # We only care about OSX for now
            for plat in [item for item in product.findall("./platforms/platform") if "osx" in item.attrib['id']]:
                # Only HyperDrive ("hdPackage") entries are importable here.
                if plat.findall("./languageSet[@packageType='hdPackage']"):
                    info = plat.find("./languageSet")
                    app = AdobeApp()
                    app.id = product.attrib['id'].upper()  # SAP code
                    app.name = format_appname(product.find("./displayName").text)
                    app.displayName = format_displayname(product.find("./displayName").text)
                    app.version = product.attrib['version']
                    app.platform = plat.attrib['id']
                    app.baseVersion = info.attrib['baseVersion']
                    # We need to know the dependencies of a product, so we can add them to the build package later
                    dependencies = {}
                    deps = info.findall("./dependencies/dependency")
                    if deps:
                        for item in deps:
                            dependencies.update({
                                item.find("./sapCode").text: item.find("./baseVersion").text
                            })
                    app.dependencies = dependencies
                    # Referenced products are used as part of the "Requires" Munki tag
                    references = {}
                    refs = product.findall("./referencedProducts/referencedProduct")
                    if refs:
                        for item in refs:
                            references.update({
                                item.find("./sapCode").text: item.find("./version").text
                            })
                    app.references = references
                    # The OS version supported string ("min-max"), which we need to pull apart.
                    osrange = plat.find("./systemCompatibility/operatingSystem/range")
                    if osrange is not None:
                        app.minOS = osrange.text.split("-")[0] or None
                        app.maxOS = osrange.text.split("-")[1] or None
                    # As a kind of kludge, we treat anything not in the CCM catalog as not a normal app.
                    if channel.attrib['name'] == "ccm":
                        app.apptype = "app"
                    else:
                        app.apptype = "sup"
                    xml_applist.append(app)
    # We need to link up the dependencies and references to contain the app info itself so that we can refer to it when building packages.
    # TODO: This might go away and become Munki Requires tags. ie, we would build the STI packages by themselves and then just tag Munki to install them first. I dunno, though.
    for app in xml_applist:
        # We only want to use the latest revision of a dependency that goes with a specific base version
        deps = []
        for d,b in app.dependencies.iteritems():
            deps.append(get_latestapp(xml_applist, d, b))
        app.dependencies = deps
        # References are matched on exact version, not latest.
        refs = []
        for d,b in app.references.iteritems():
            need = [ver for ver in xml_applist if ver.id == d and ver.version == b]
            refs.append(need[0])
        app.references = refs
        # Gather up the metadata needed to drive uninstall scripts: every
        # catalog entry for this SAP code that is older than this build.
        oldversions = []
        previous = [product for product in ffc_xml.getroot().findall("./channel/products/product") if product.attrib['id'] == app.id]
        for product in previous:
            for plat in [item.find("./languageSet") for item in product.findall("./platforms/platform") if "osx" in item.attrib['id']]:
                if plat.attrib['packageType'] == 'hdPackage':
                    old = {
                        'type': 'HD',
                        'displayName': (format_displayname(product.find("./displayName").text)),
                        'id': app.id,
                        'version': plat.attrib['baseVersion'],
                        'platform': app.platform
                    }
                elif plat.attrib['packageType'] == 'RIBS':
                    old = {
                        'type': 'RIB',
                        'displayName': (format_displayname(product.find("./displayName").text)),
                        'version': product.attrib['version'],
                        'code': plat.attrib['productCode']
                    }
                # NOTE(review): if a languageSet is neither hdPackage nor
                # RIBS, 'old' here is unbound (first iteration) or stale from
                # a previous iteration — confirm the catalog only ever uses
                # those two packageType values.
                if LooseVersion(old['version']) < LooseVersion(app.baseVersion):
                    # De-duplicate by version string before recording.
                    if not [ver for ver in oldversions if ver['version'] == old['version']]:
                        oldversions.append(old)
        oldversions.sort(key=lambda ver: LooseVersion(ver['version']))
        app.previousVersions = oldversions
    print ("Imported package information for " + str(len(xml_applist)) + " items.")
    return xml_applist
def filter_apps(applist, filters=None, latest_only=latest_only):
    """Return the subset of applist that should actually be built.

    filters is a list of Adobe SAP codes (case-insensitive); when empty or
    None, every "app"-type item is selected. Referenced products of each
    selected SAP code are pulled in as well, and only the latest revision
    of each base version (or of the whole product, when latest_only is
    True) survives.

    Fixes: the original signature used a mutable default argument
    (filters=[]) and shadowed the builtin `id` with a loop variable.
    """
    if filters is None:
        filters = []
    filtered_applist = []
    # We only want to build apps themselves by default, and not the support files on their own.
    if len(filters) == 0:
        filters = [x.id for x in applist if x.apptype == "app"]
    # We need to keep references in mind; if an ID we want needs other items, we need to add that info in the build list.
    deps = []
    for sap in filters:
        sap = sap.upper()
        deps.append(sap)
        # Add the SAP codes of every product referenced by this one.
        for ref_id in [
            x.id.upper()
            for depend in applist if depend.id == sap
            for x in depend.references
        ]:
            deps.append(ref_id)
    filters = sorted(set(deps))
    # Now to filter the list
    for app in applist:
        if app.id not in filters:
            continue
        # We need to filter versions, based upon config wants.
        if latest_only == True:  # we only want THE latest version
            versions = get_latestapp(applist, app.id)
        else:  # otherwise, we get the latest revision of all the base versions
            versions = get_latestapp(applist, app.id, app.baseVersion)
        if not versions.version == app.version:
            continue
        # If we're here, then we add the app to the list
        filtered_applist.append(app)
    return filtered_applist
def get_latestapp(applist, sap, baseVersion=None):
    """Return the newest entry in applist whose id matches sap.

    When baseVersion is given, only revisions of that base version are
    considered. Raises IndexError if nothing matches.
    """
    if baseVersion is None:
        matches = [a for a in applist if a.id == sap]
    else:
        matches = [a for a in applist if a.id == sap and a.baseVersion == baseVersion]
    ordered = sorted(matches, key=lambda a: LooseVersion(a.version))
    return ordered[-1]
def build_packages(applist, output_path):
    """Create a DMG-wrapped installer pkg and a Munki pkginfo plist for each app.

    Apps whose DMG already exists in output_path are not rebuilt; the Munki
    plist is (re)generated whenever the DMG exists but the plist does not.
    All intermediate build folders are removed in the finally block so a
    failed build does not leave half-finished artifacts behind.

    Fixes: the pkg-folder cleanup error path referenced the undefined name
    'un_path' (a NameError) instead of pkg_path; the final DMG/plist check
    relied on operator precedence and is now an explicit `and not`.
    """
    print ("Creating packages for " + str(len(applist)) + " applications.")
    for app in applist:
        package_fullname = (u"Adobe " + app.name + " " + app.version)
        work_path = os.path.join(output_path, package_fullname)
        # Check if we've already made a package.
        if not os.path.exists(os.path.join(output_path, (package_fullname + ".dmg"))):
            try:
                print (" Creating package for " + package_fullname)
                copy_appsetup(app, work_path)
                make_appsetup(app, work_path, output_path)
            except Exception as err:
                print (" Could not create package for " + package_fullname),err
            # All the cleanup is here, so that even if something breaks, we should delete it all.
            finally:
                if os.path.exists(work_path):
                    try:
                        shutil.rmtree(work_path)
                    except Exception as err:
                        print (" Could not remove folder at: " + work_path),err
                pkg_path = os.path.join(output_path, "pkg")
                if os.path.exists(pkg_path):
                    try:
                        shutil.rmtree(pkg_path)
                    except Exception as err:
                        # Was 'un_path' (undefined here) in the original.
                        print (" Could not remove folder at: " + pkg_path),err
                dmg_path = os.path.join(output_path, "dmg")
                if os.path.exists(dmg_path):
                    try:
                        shutil.rmtree(dmg_path)
                    except Exception as err:
                        print (" Could not remove folder at: " + dmg_path),err
                pkg_path = os.path.join(output_path, (package_fullname + ".pkg"))
                if os.path.exists(pkg_path):
                    try:
                        os.remove(pkg_path)
                    except Exception as err:
                        print (" Could not remove pkg at: " + pkg_path),err
        # Make Munki pkginfo when the DMG exists but the plist does not.
        dmg = os.path.join(output_path, (package_fullname + ".dmg"))
        plist = os.path.join(output_path, ((u"Adobe" + app.name.replace(" ","") + "-" + app.version) + ".plist"))
        if os.path.exists(dmg) and not os.path.exists(plist):
            make_munkiinfo(app, dmg, plist)
def copy_appsetup(app, work_path):
    # Stage everything the HDCore installer needs into work_path: the
    # Application.json and zip payloads for the app and each dependency,
    # a postinstall script, and a generated Driver.xml manifest.
    # Build base folder
    if os.path.exists(work_path) is False:
        try:
            os.makedirs(work_path)
        except Exception as err:
            print (" Could not create folder at: " + work_path)
            raise err
    # Get a list of packages from the json files: the app itself plus its
    # dependencies, keyed by SAP code.
    packages = {app.id: os.path.join(app.id, app.version, app.platform, "Application.json")}
    for dep in app.dependencies:
        packages.update({dep.id: os.path.join(dep.id, dep.version, dep.platform, "Application.json")})
    # Process the files for each package
    for package,path in packages.iteritems():
        if os.path.exists(os.path.join(work_path, package)) is False:
            try:
                os.makedirs(os.path.join(work_path, package))
            except Exception as err:
                print (" Could not create folder at: " + os.path.join(work_path, package))
                raise err
        # Copy over the JSON file
        src = os.path.join(adobe_ACC_path, "services/ffc/packages", path)
        dst = os.path.join(work_path, package, "Application.json")
        try:
            shutil.copy2(src, dst)
        except Exception as err:
            print (" Could not create: " + dst)
            raise err
        # Read the JSON file to get the package files it needs, as well as the path to find them
        with open(dst) as fd:
            appdata = json.load(fd)
        for item in appdata[u'Packages'][u'Package']:
            try:
                # Copy the zips
                print (" Copying " + (os.path.split(item['Path']))[-1] + " ...")
                src = os.path.join(adobe_ACC_path, "products", item['Path'].lstrip("/"))
                dst = os.path.join(work_path, package, (os.path.split(item['Path']))[-1])
                shutil.copy2(src, dst)
            except Exception as err:
                print (" Could not create: " + dst)
                raise err
        # We could ALSO copy the verification file, but the installer doesn't seem to need it.
    # Create automatic installer script, executable by everyone.
    with open(os.path.join(work_path, "postinstall"), "w") as fd:
        fd.write(_adobeHDInstaller)
    os.chmod(os.path.join(work_path, "postinstall"), 0755)
    # Build up the Driver.xml manifest that the HDCore Setup binary reads.
    driver = ET.Element("DriverInfo")
    productinfo = ET.SubElement(driver, "ProductInfo")
    for key, val in dict({
        "Name": app.name,
        "SAPCode": app.id,
        "CodexVersion": app.version,
        "Platform": app.platform,
        "EsdDirectory": (os.path.join("./", app.id))
    }).items():
        child = ET.Element(key)
        child.text = str(val)
        productinfo.append(child)
    # One <Dependency> element per dependency, pointing at its staged folder.
    dependencies = ET.SubElement(productinfo, "Dependencies")
    for d in app.dependencies:
        sub = ET.Element("Dependency")
        for key, val in dict({
            "SAPCode": d.id,
            "BaseVersion": d.baseVersion,
            "EsdDirectory": (os.path.join("./", d.id))
        }).items():
            child = ET.Element(key)
            child.text = str(val)
            sub.append(child)
        dependencies.append(sub)
    ET.ElementTree(driver).write(os.path.join(work_path, "Driver.xml"), encoding="utf-8", xml_declaration=True)
def make_appsetup(app, appfolder, output_path):
    # Turn the staged folder from copy_appsetup() into a distribution pkg
    # (app pkg + optional previous-version uninstaller pkg) and wrap the
    # result in a DMG under output_path.
    pkgid = format_pkgid(app.name, app.version)
    pkg_tmp = os.path.join(output_path, "pkg")
    if os.path.exists(pkg_tmp) is False:
        os.makedirs(pkg_tmp)
    # Create app pkg
    pkg = os.path.join(pkg_tmp, (pkgid + ".pkg"))
    make_pkg(pkgid, appfolder, pkg)
    # Create previous version uninstaller pkg (only if there are older
    # versions recorded for this app).
    if len(app.previousVersions) > 0:
        postinstall = "#!/bin/bash"
        ribs = []
        hds = []
        # Split previous versions by installer technology.
        for ver in app.previousVersions:
            if ver['type'] == "RIB":
                ribs.append(ver['code'])
            elif ver['type'] == "HD":
                hds.append(ver['version'])
        if ribs:
            codes = ' '.join(ribs)
            postinstall = (postinstall + "\n\n" + _adobeRIBUninstall.replace("adobeRIB",codes))
        if hds:
            codes = ' '.join(hds)
            postinstall = (postinstall + "\n\n" + _adobeHDUninstall.replace("adobeSAP", app.id).replace("adobeVer", codes).replace("adobePlat", app.platform))
        un_pkgid = "com.adobe.cc.uninstall"
        un_path = os.path.join(pkg_tmp, un_pkgid)
        un_pkg = os.path.join(pkg_tmp, (un_pkgid + ".pkg"))
        if os.path.exists(un_path) is False:
            try:
                os.makedirs(un_path)
            except Exception as err:
                print (" Could not create folder at: " + un_path)
                raise err
        with open(os.path.join(un_path, "postinstall"), "w") as fd:
            fd.write(postinstall)
        os.chmod(os.path.join(un_path, "postinstall"), 0755)
        make_pkg(un_pkgid, un_path, un_pkg)
        uninstall = []
        uninstall.append(dict(
            {
                "id": un_pkgid,
                "title": "Remove Previous Versions",
                "pkg": un_pkg
            }
        ))
    else:
        uninstall = None
    # Create pkg distribution xml: the app choice is mandatory and hidden,
    # the uninstaller (if any) is a visible, deselected-by-default choice.
    distxml = ET.Element("installer-gui-script", minSpecVersion="1")
    ET.SubElement(distxml, "title").text = app.displayName
    ET.SubElement(distxml, "options", customize="always")
    outline = ET.SubElement(distxml, "choices-outline")
    ET.SubElement(outline, "line", choice=pkgid)
    ET.SubElement(distxml, "choice", id=pkgid, visible="false").append(ET.Element("pkg-ref", id=pkgid))
    ET.SubElement(distxml, "pkg-ref", id=pkgid, version="0").text = os.path.split(pkg)[-1]
    if uninstall:
        for item in uninstall:
            ET.SubElement(outline, "line", choice=item['id'])
            ET.SubElement(distxml, "choice", id=item['id'], title=item['title'], start_selected="false").append(ET.Element("pkg-ref", id=item['id']))
            ET.SubElement(distxml, "pkg-ref", id=item['id'], version="0").text = os.path.split(item["pkg"])[-1]
    distfile = os.path.join(pkg_tmp, (os.path.split(appfolder)[-1] + ".xml"))
    ET.ElementTree(distxml).write(distfile, encoding="utf-8", xml_declaration=True)
    # Create pkg distribution
    distpkg = os.path.join(pkg_tmp, (os.path.split(appfolder)[-1] + ".pkg"))
    make_pkgdist(distfile, pkg_tmp, distpkg)
    # Create dmg from a staging folder containing just the distribution pkg.
    dmg = os.path.join(output_path, (os.path.split(appfolder)[-1] + ".dmg"))
    dmg_tmp = os.path.join(output_path, "dmg", os.path.split(appfolder)[-1])
    cmd = [
        "/usr/bin/hdiutil",
        "create",
        "-srcfolder", dmg_tmp,
        "-format", "UDBZ",
        dmg
    ]
    try:
        print (" Creating DMG file ...")
        if os.path.exists(dmg_tmp) is False:
            os.makedirs(dmg_tmp)
        shutil.move(distpkg, dmg_tmp)
        subprocess.check_call(cmd)
    except Exception as err:
        print "Could not create dmg"
        raise err
def make_pkg(pkgid, source, output):
    """Build a payload-free component pkg at `output` via pkgbuild.

    `source` becomes the pkg's scripts directory; any failure is reported
    and re-raised.
    """
    command = [
        "/usr/bin/pkgbuild",
        "--identifier", pkgid,
        "--nopayload",
        "--scripts", source,
        output,
    ]
    try:
        print (" Creating pkg " + output + " ...")
        subprocess.check_call(command)
    except Exception as err:
        print ("Could not create " + output)
        raise err
def make_pkgdist(dist, pkgs, output):
    """Build a distribution pkg at `output` via productbuild.

    `dist` is the distribution XML and `pkgs` the folder holding the
    component pkgs it references; any failure is reported and re-raised.
    """
    command = [
        "/usr/bin/productbuild",
        "--package-path", pkgs,
        "--distribution", dist,
        output,
    ]
    try:
        print (" Creating pkg " + output + " ...")
        subprocess.check_call(command)
    except Exception as err:
        print ("Could not create " + output)
        raise err
def make_munkiinfo(app, dmg, plist):
    # Run makepkginfo against the built DMG, then enrich the resulting
    # pkginfo with an uninstall script, an installs array, optional
    # installer choices and postinstall, and write it to `plist`.
    munkiname = format_munkiname(app.name)
    # Build up the makepkginfo call
    cmd = [
        "/usr/local/munki/makepkginfo",
        ('--name=' + munkiname),
        ('--displayname=' + app.displayName),
        ('--pkgvers=' + app.version),
        ('--developer=' + munkiVendor),
        '--unattended_install',
        '--unattended_uninstall',
    ]
    # Set up the uninstall script with the right items
    uninstall = _munkiUninstall.replace("adobeSAP", app.id).replace("adobeVer", app.baseVersion).replace("adobePlat", app.platform)
    # Set up a choices XML if we want to remove previous versions
    if munkiRemoveOlderVersions == True:
        munkichoices = []
        munkichoices.append(dict(
            {
                "attributeSetting": 1,
                "choiceAttribute": "selected",
                "choiceIdentifier": "com.adobe.cc.uninstall"
            }
        ))
    else:
        munkichoices = None
    # Set up a postinstall script if wanted
    if munkipostinstall:
        postinstall = ("#!/bin/bash\n\n" + munkipostinstall)
    else:
        postinstall = None
    # Inspect the app's json file to get the install path
    with open(os.path.join(adobe_ACC_path, "services/ffc/packages", app.id, app.version, app.platform, "Application.json")) as fd:
        appdata = json.load(fd)
    # Nested regex subs isn't pretty, but eh. Ignore case is used because Adobe is all over with their casing.
    installpath = re.sub("\[InstallDir\]",
        (re.sub("\[AdobeProgramFiles\]", "/Applications", appdata[u'InstallDir'][u'value'], flags=re.IGNORECASE)),
        appdata[u'AppLaunch'], flags=re.IGNORECASE).split(".app")[0] + ".app"
    # We'll TRY to read out what the bundle info would be, by finding the
    # app's Info.plist inside the "core" package zip.
    bundle = None
    zippath = [item[u'Path'] for item in appdata[u'Packages'][u'Package'] if item.get(u'Type', None) == u'core']
    if len(zippath) > 0:
        try:
            zippath = os.path.join(adobe_ACC_path, "products", zippath[0].lstrip("/"))
            zipped = zipfile.ZipFile(zippath)
            # This will try to find the app. I don't know if this will always work.
            bs = re.compile(".*/" + os.path.split(installpath)[-1].replace("(","\(").replace(")","\)") + "/Contents/Info.plist")
            for member in zipped.namelist():
                if bs.match(member):
                    bundle = zipped.read(member)
                    # Parse the raw plist bytes via the PyObjC bridge.
                    bundle, kind, err = NS.NSPropertyListSerialization.propertyListWithData_options_format_error_(buffer(bundle), 0, None, None)
                    break
        except Exception as err:
            print ("Cannot read zip file " + zippath),err
    # Now, try to build up that installs item ...
    # NOTE(review): if no Info.plist matched above, 'bundle' is still None
    # and the subscripts below raise TypeError — confirm every core zip
    # contains the app bundle's Info.plist.
    installs = list()
    installs.append(dict({
        u'CFBundleIdentifier': bundle[u'CFBundleIdentifier'] or '',
        u'CFBundleName': bundle[u'CFBundleName'] or '',
        u'CFBundleShortVersionString': bundle[u'CFBundleShortVersionString'] or app.version,
        u'CFBundleVersion': bundle[u'CFBundleVersion'] or app.version,
        u'path': installpath,
        u'type': u'application',
        u'version_comparison_key': u'CFBundleShortVersionString'
    }))
    #TODO: Generate conflicting processes as well?
    # Set the proper minimum OS version. I don't believe there will ever be a max OS, but we can fix that up whenever that time comes.
    if app.minOS is not None:
        cmd.append(('--minimum_os_version=' + app.minOS))
    # Add any required packages: the configured extras plus this app's
    # referenced products.
    required = list()
    if munkiRequired is not None:
        for item in munkiRequired:
            required.append(item)
    for item in app.references:
        # TODO: add version reference?
        required.append(format_munkiname(item.name))
    for item in required:
        cmd.append(('--requires=' + item))
    # Finally, add the target DMG
    cmd.append(dmg)
    try:
        print (" Creating Munki plist for " + dmg + " ...")
        data = subprocess.check_output(cmd)
        # Now we need to add some bits manually: parse the makepkginfo
        # output plist, then merge in our extra keys before writing.
        pdata, kind, err = NS.NSPropertyListSerialization.propertyListWithData_options_format_error_(buffer(data), 0, None, None)
        pdata.update({
            u'installer_item_location': (os.path.join(munkiAppFolder, pdata[u'installer_item_location'])),
            u'uninstallable': True,
            u'uninstall_method': u'uninstall_script',
            u'uninstall_script': uninstall,
            u'installs': installs
        })
        if postinstall:
            pdata.update({
                u'postinstall_script': postinstall
            })
        if munkichoices:
            pdata.update({
                u'installer_choices_xml': munkichoices
            })
        # Write out that plist file
        pldata, err = NS.NSPropertyListSerialization.dataWithPropertyList_format_options_error_(pdata, NS.NSPropertyListXMLFormat_v1_0, 0, None)
        with open(plist, "w") as fd:
            fd.write(unicode(pldata))
    except Exception as err:
        print "Could not create Munki plist"
        raise err
# Standard script entry point.
if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment