Created
April 2, 2021 20:24
-
-
Save dlevi309/d5055804ad93975d2786188df623a871 to your computer and use it in GitHub Desktop.
The ipatool script as shipped with Xcode.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/sandbox-exec -n no-network /usr/bin/ruby -E UTF-8 -v
# -*- coding: utf-8; mode: ruby -*-
#
# A utility for performing various operations on IPA files. This script is
# intended to be invoked by xcodebuild. It is not intended for direct use,
# or for invocation in any manner other than through xcodebuild. Any other
# use is unsupported.
#
# Copyright © 2015-2020 Apple Inc. All Rights Reserved.
#
# ipatool is a short-lived process; disabling GC trades memory for speed on purpose.
GC.disable

require 'optparse'
require 'cfpropertylist'
require 'shellwords'
require 'pathname'
require 'json'
require 'fileutils'
require 'find'
require 'tmpdir'
require 'ostruct'
require 'digest'
require 'sqlite3'
require 'pp'
require 'open3'
require 'set'
# DateTime is used below (EventLogStream timestamps); nothing above is
# guaranteed to load 'date' implicitly, so require it explicitly.
require 'date'

# Record the host environment up front so failures can be diagnosed from logs.
$stderr.puts "OS X #{`sw_vers -productVersion`.strip} #{`sw_vers -buildVersion`.strip}"
$stderr.puts "ENV: #{ENV.inspect}"
$stderr.puts "ARGV: #{ARGV.inspect}"
#
# Library
#

# Aborts the whole tool (exit status 1) with a message and backtrace when a
# precondition does not hold. A truthy +x+ makes this a no-op.
def assert(x, message = nil)
  unless x
    $stderr.puts "Assertion failed: #{message}"
    $stderr.puts caller
    exit(1)
  end
end
class Object
  # Lightweight runtime type annotation: aborts unless self is kind of +t+.
  def assert_kind_of(t, message = nil)
    assert(kind_of?(t), "#{self.class} is not kind of #{t}: #{message}")
  end

  # Type-checked identity: asserts the type, then returns self.
  def as(t, message = nil)
    assert_kind_of(t, message)
    self
  end

  # Like #as for optional values; NilClass overrides this to accept nil.
  def optional_as(t, message = nil)
    assert_kind_of(t, message)
    self
  end

  # Asserts that self is an Array whose elements are all kind of +t+.
  def assert_array_of(t, message = nil)
    assert_kind_of(Array, message)
    each { |element| element.assert_kind_of(t, message) }
  end
end
class NilClass
  # nil is always an acceptable value for an optional-typed slot.
  def optional_as(t, message = nil)
    nil
  end
end
# A minimal one-shot future: the block starts on a background thread
# immediately; #await joins and either returns the value or re-raises
# whatever StandardError the block raised.
class Future
  def initialize(&work)
    @thread = Thread.new do
      begin
        @result = work.call
        @error = nil
      rescue => caught
        @result = nil
        @error = caught
      end
    end
  end

  # Blocks until the computation finishes. nil is a valid result value.
  def await
    @thread.join
    raise @error unless @error.nil?
    @result
  end
end
class Hash
  # Builds a hash from an enumerable of [key, value] pairs.
  def self.from_tuples(array)
    array.each_with_object(self.new) { |(k, v), acc| acc[k] = v }
  end
end
module Enumerable
  # Partitions elements into groups using a caller-supplied equivalence
  # block: an element joins the first existing group whose first member it
  # is equivalent to, otherwise it starts a new group. Encounter order of
  # groups and of elements within a group is preserved.
  def group_using(&eql)
    each_with_object([]) do |element, groups|
      bucket = groups.find { |g| eql.call(g.first, element) }
      if bucket
        bucket << element
      else
        groups << [element]
      end
    end
  end
end
class NilClass
  # nil is treated as "already empty".
  def nil_if_empty
    nil
  end
end
module Enumerable
  # Collapses an empty collection to nil; otherwise returns self.
  def nil_if_empty
    size == 0 ? nil : self
  end
end
class String
  # Collapses an empty string to nil; otherwise returns self.
  def nil_if_empty
    empty? ? nil : self
  end
end
class String
  # Prefixes every line of the receiver with `level` copies of the indent
  # unit. #lines keeps trailing newlines, so joining with '' reassembles the
  # original line structure.
  def indent(level = 1)
    prefix = " " * level
    return self.lines.
      map{|l| prefix + l}.
      join('') #lines retains \n
  end

  # Unicode-equivalence comparison (NFC-normalized equality).
  #
  # Historically this shelled out to Python because Ruby 2.0 lacked
  # String#unicode_normalize. That fallback is kept for ancient Rubies, but
  # it only works with Python 2 (`str.decode` does not exist on Python 3,
  # and macOS no longer ships Python 2), so on Ruby >= 2.2 we normalize
  # natively instead.
  def unicode_equal?(other)
    if respond_to?(:unicode_normalize)
      return unicode_normalize(:nfc) == other.unicode_normalize(:nfc)
    end
    return CmdSpec.new(locate_tool("python"), ["-c", "import sys; import unicodedata; print(unicodedata.normalize('NFC', sys.argv[1].decode('utf-8')) == unicodedata.normalize('NFC', sys.argv[2].decode('utf-8')))", self, other]).run(0, false, true).strip == "True"
  end
end
class Exception
  # One-stop log rendering: the #inspect line followed by the indented
  # backtrace. Relies on String#indent defined earlier in this file; raises
  # NoMethodError if the exception was never raised (backtrace is nil), same
  # as the historical behavior.
  def to_log_s
    trace = backtrace.join("\n").indent
    "#{inspect}\n#{trace}"
  end
end
#
# Event Tracing Output
# During the execution of `ipatool`, various status events are emitted to the `--event-log` file.
#
class EventLogStream
  # Compact JSON: no indentation or inter-token spaces.
  JsonOutputFormatting = { indent: '', space: '' }
  attr :nextTaskId
  attr :tasks
  attr :pipe

  # pipe: an open, writable IO receiving length-prefixed JSON records.
  def initialize(pipe)
    @pipe = pipe
    @nextTaskId = 1
    @tasks = []   # stack of currently-open task ids (innermost last)
  end

  # Emits one event record and returns the task id it was logged under.
  #
  # type      :: :startTask / :endTask / other marker symbols
  # label     :: human-readable event name
  # toolInfo  :: optional hash describing an external tool invocation
  # extraInfo :: optional hash; coerced to string->string pairs
  # taskId    :: explicit id (used by :endTask); defaults to the next fresh id
  #
  # Please note that this format is a contract between multiple clients,
  # including the `IPAToolLib` package in HLT and the App Store's logging
  # mechanism.
  def logEvent(type, label, toolInfo = nil, extraInfo = nil, taskId = nil)
    timestamp = DateTime.now.iso8601(3)
    taskId ||= nextTaskId
    parentId = nil
    if type == :startTask
      parentId = @tasks.last
      @nextTaskId += 1
      @tasks.push taskId
    elsif type == :endTask
      @tasks.pop
    end
    data = { event_type: type.to_s,
             label: label,
             parent_id: parentId,
             task_id: taskId,
             tool_info: toolInfo,
             # Force the extra info to be a dictionary of string/string pairs.
             extra_info: (extraInfo || {}).map{ |k,v| [k.to_s, v.to_s] }.to_h,
             timestamp: timestamp }.reject { |k, v| v.nil? || v.to_s.empty? }
    json = JSON.generate(data, JsonOutputFormatting)
    # Length-prefixed framing: "<byte count>\n<json>". Write to the pipe
    # handed to #initialize (this used to reach for the global
    # OPTIONS.eventLogPipe — the same IO in practice — and crashed on the
    # unconditional flush when no event log was configured).
    if @pipe
      @pipe.puts "#{json.length}\n#{json}"
      @pipe.flush
    end
    return taskId
  end
end
# Logs out tracing information for a wrapped task with :startTask and :endTask markers.
# Both helpers are no-ops (returning nil) when no --event-log was configured.
def EmitEventLogStartTrace(label, extraInfo = nil, toolInfo = nil)
  stream = OPTIONS.eventLog
  stream.logEvent(:startTask, label, toolInfo, extraInfo) if stream
end

def EmitEventLogStopTrace(taskId, label, extraInfo = nil, toolInfo = nil)
  stream = OPTIONS.eventLog
  stream.logEvent(:endTask, label, toolInfo, extraInfo, taskId) if stream
end
# Lazily-evaluated verbose logging: the message block is only invoked when
# verbosity is non-zero. Uses $stderr.write because #write is atomic and
# #puts is not.
def log(&f)
  return unless OPTIONS.verbosity > 0
  $stderr.write "#{f.call}\n"
end
# FileUtils wrapper that mirrors every file operation to the verbose log
# (shell-style "$ tool args" line) and the event-log trace stream before
# delegating to FileUtils proper. The six public methods previously repeated
# the same log/trace/op/trace sequence verbatim; it now lives in one helper.
class LoggingFileUtils
  def rm_f(path)
    logged_op("rm", ["-f", path.to_s]) { FileUtils.rm_f(path) }
  end

  def rm_r(path)
    logged_op("rm", ["-r", path.to_s]) { FileUtils.rm_r(path) }
  end

  def mkdir_p(path)
    logged_op("mkdir", ["-p", path.to_s]) { FileUtils.mkdir_p(path) }
  end

  def cp(a, b)
    logged_op("cp", [a.to_s, b.to_s]) { FileUtils.cp(a, b) }
  end

  def cp_r(a, b)
    logged_op("cp", ["-r", a.to_s, b.to_s]) { FileUtils.cp_r(a, b) }
  end

  def mv(a, b)
    # Historical quirk preserved: the trace reports "-f" while the
    # shell-style log line omits it.
    logged_op("mv", ["-f", a.to_s, b.to_s], [a.to_s, b.to_s]) { FileUtils.mv(a, b) }
  end

  private

  # Shared log+trace wrapper. trace_args go to the event log as-is;
  # display_args (defaulting to trace_args) form the "$ tool args" log line.
  def logged_op(tool, trace_args, display_args = trace_args)
    log { "$ #{tool} #{display_args.map { |x| x.shellescape }.join(' ')}" }
    label = "File Operation (#{tool})"
    taskId = EmitEventLogStartTrace(label, nil, { :tool => tool, :args => trace_args })
    yield
    EmitEventLogStopTrace(taskId, label)
  end
end
FS = LoggingFileUtils.new
# Raises if it couldn't find it.
# Searches additional_paths first, then $PATH, for an executable named
# +name+; absolute paths are returned untouched.
def locate_tool(name, additional_paths = [])
  return name if Pathname.new(name).absolute?
  combined = (additional_paths || []).map { |p| p.to_s }.join(File::PATH_SEPARATOR) +
             File::PATH_SEPARATOR + (ENV["PATH"] || "")
  dirs = combined.split(File::PATH_SEPARATOR).reject { |dir| dir.empty? }
  hit = dirs.map { |dir| dir + "/" + name }.find { |candidate| File.executable?(candidate) }
  raise "Couldn't locate #{name.shellescape} in #{dirs.shelljoin}" unless hit
  return hit
end
# Describes one external command invocation: binary, argv, environment
# overrides and working directory. #run executes it via Open3, optionally
# memoising results in the process-global $CMD_SPEC_CACHE.
class CmdSpec
  attr_accessor :bin
  attr_accessor :args
  attr_accessor :env
  attr_accessor :chdir

  def initialize(bin, args, env = nil, chdir = nil)
    assert(bin)
    @bin = locate_tool(bin.to_s)
    @args = (args || []).map { |a| a.to_s }
    @env = Hash.from_tuples((env || {}).map { |k, v| [k.to_s, v.to_s] })
    @chdir = chdir.to_s
  end

  # Shell-style rendering: "cd DIR && K=V bin args".
  def to_s
    rendered = ""
    rendered << "cd #{chdir.shellescape} && " if chdir.nil_if_empty
    rendered << env.map { |k, v| "#{k.shellescape}=#{v.shellescape} " }.join(' ') if env.nil_if_empty
    rendered << bin.shellescape
    rendered << " #{args.shelljoin}" if args.nil_if_empty
    rendered
  end

  # Key used to memoise identical invocations in $CMD_SPEC_CACHE.
  def cache_key
    "#{bin}#{args}#{env}#{chdir}"
  end

  class NonZeroExitException < StandardError
    attr_reader :cmd_spec
    attr_reader :out_str
    attr_reader :err_str
    attr_reader :status
    def initialize(cmd_spec, out_str, err_str, status)
      @cmd_spec = cmd_spec
      @out_str = out_str
      @err_str = err_str
      @status = status
    end
  end

  # Runs the command and returns its stdout.
  # Raises NonZeroExitException if the process exits with non-zero.
  # With use_cache, identical invocations are served from $CMD_SPEC_CACHE.
  def run(log_indent = 0, verbose_log = true, use_cache = false)
    if use_cache
      $CMD_SPEC_CACHE ||= Hash.new
      key = cache_key
      cached = $CMD_SPEC_CACHE[key]
      if cached.nil?
        cached = run(log_indent, verbose_log, false)
        $CMD_SPEC_CACHE[key] = cached
      end
      return cached
    end
    spawn_opts = {}
    spawn_opts[:chdir] = chdir if chdir.nil_if_empty
    toolName = File.basename(@bin)
    taskId = EmitEventLogStartTrace("External Tool (#{toolName})", nil, { :tool => @bin, :args => @args, :env => @env, :chdir => @chdir })
    out_str, err_str, status = Open3.capture3(env, bin, *args, spawn_opts)
    EmitEventLogStopTrace(taskId, "External Tool (#{toolName})")
    rendered = nil
    render = lambda {
      rendered ||= "$ #{self}\nStatus: #{status}\nStdout:\n#{out_str.indent}\nStderr:\n#{err_str.indent}\n"
    }
    log { render.call.indent(log_indent + 1) } if verbose_log
    unless status.success?
      raise NonZeroExitException.new(self, out_str, err_str, status), render.call
    end
    return out_str
  end
end
# Resolves and stores a dependency graph for mach-o linkage. It stores an array of MachOImage dependencies per MachOImage.
class LinkageGraph
  # var graph : Hash <MachOImage -> [MachOImage]>
  def initialize
    @graph = Hash.new
  end

  # Debug rendering: one entry per image (path relative to parent_path plus
  # arch), followed by its indented list of resolved dependencies.
  def to_s(parent_path)
    img_to_s = lambda {|img|
      "#{img.machoFile.path.relative_path_from(parent_path)} #{img.arch}"
    }
    return @graph.each.map{|k, vs|
      "#{img_to_s.call(k)} ->\n#{vs.map{|v|img_to_s.call(v)}.join("\n").indent}"
    }.join("\n")
  end

  # Dependency lookup: the resolved [MachOImage] for an image, or nil if
  # that image has not been visited yet.
  def [](x)
    return @graph[x]
  end

  def []=(x, y)
    return @graph[x] = y
  end

  # Resolve MachOImage dependencies for image.
  #
  # rpathStack : [Pathname] -- Holds the linker's @rpath stack. This accumulates as we chase further down the dependency tree.
  # machOsByRealPath : { Pathname : FSMachOFileNode } -- Maps MachO files by real path so that we can resolve linkage paths to objects.
  # executablePath : Pathname -- Current substitution for @executable_path.
  # level : Int -- Log indentation level. This accumulates as we chase further down the dependency tree.
  def resolveLoadedDylibs(image, rpathFallbackStack, rpathStack = [], machOsByRealPath = {}, executablePath = nil, level = 0)
    # Did we already process this image?
    return if self[image]
    # If this is an executable, it becomes the new @executable_path
    executablePath = if image.type == "MH_EXECUTE" then image.machoFile.path.parent else executablePath end
    executablePath.assert_kind_of(Pathname) unless executablePath.nil?
    # Add its rpaths to the rpath stack. Entries that fail to expand are
    # warned about and dropped rather than aborting the whole walk.
    rpaths = image.rpaths
    rpaths.assert_array_of(String)
    rpathStack = rpaths.map{|rpath|
      result = nil
      begin
        result = _expandPath(rpath, image, executablePath, [], rpathFallbackStack)
      rescue RuntimeError, SystemCallError, IOError => e
        EmitWarning(__LINE__, "Failed to resolve rpath for #{image.machoFile.path.basename}: #{e}")
        result = nil
      end
      result
    }.compact + rpathStack
    rpathStack.assert_array_of(Pathname)
    # Resolve its loaded libraries to MachOImage objects
    dylibs = image.loadedDylibs
    dylibs.assert_array_of(String)
    resolvedDylibs = dylibs.
      select{|n| !n.start_with?('/') }. # Skip absolute paths
      map{ |dylibName|
        resolvedImage = nil
        begin
          # Find a matching rpath to resolve against
          expandedPath = _expandPath(dylibName, image, executablePath, rpathStack, rpathFallbackStack).realpath
          # Find a model object
          resolvedMachO = machOsByRealPath[expandedPath] || (raise "Could not find MachO for #{expandedPath}")
          # Pick the best image/slice to link against
          resolvedImage = resolvedMachO.linkableImageForArch(image.arch) || (raise "Could not find image to link for #{image.arch} in #{expandedPath}")
        rescue RuntimeError, SystemCallError, IOError => e
          EmitWarning(__LINE__, "Failed to resolve linkage dependency #{image.machoFile.path.basename} #{image.arch} -> #{dylibName}: #{e}")
          resolvedImage = nil
        end
        resolvedImage
      }.compact
    resolvedDylibs.assert_array_of(MachOImage)
    self[image] = resolvedDylibs
    # log { "LinkageGraph << #{image} ->\n#{resolvedDylibs.join("\n").indent}".indent(level) }
    # Follow each dependency (recursion terminates via the visited check at
    # the top of this method).
    resolvedDylibs.each{|img| resolveLoadedDylibs(img, rpathFallbackStack, rpathStack, machOsByRealPath, executablePath, level + 1) }
  end

  # Expand @rpath/path -> /absolute/path.
  #
  # path : String - The linkage path to expand.
  # image : MachOImage - The substitution for @loader_path.
  # executablePath : Pathname - The substitution for @executable_path.
  # rpathStack : [Pathname] - Stack of @rpath substitutions.
  def _expandPath(path, image, executablePath, rpathStack, rpathFallbackStack)
    s = path.dup
    if s.start_with?("/")
      # Need to use sdkPath.to_s because we want to make sure that /a + /b = /a/b instead of /b.
      s = image.platform.sdkPath.to_s + s
    end
    if s =~ /^@executable_path/
      raise "Could not resolve @executable_path for #{path} from #{image.machoFile.path.basename}" unless executablePath
      s.gsub!(/^@executable_path/, executablePath.to_s)
    end
    s.gsub!(/^@loader_path/, image.machoFile.path.parent.to_s)
    if s =~ /^@rpath/
      # Candidate substitutions: the accumulated stack, then the binary's
      # (or enclosing framework's) parent directory, then the caller-supplied
      # fallbacks. The first expansion that exists on disk wins.
      all_rpaths = rpathStack + [LinkageGraph.parent_path_for_macho_binary_or_framework(image.machoFile.path)] + rpathFallbackStack
      s = all_rpaths.map{|rp|
        rp.assert_kind_of(Pathname)
        s.gsub(/^@rpath/, rp.to_s)
      }.find{|p|
        File.exist?(p)
      }
      raise "Could not resolve @rpath in #{path} from #{image.machoFile.path.basename}" if !s
    end
    return Pathname(s)
  end

  # Directory that contains the binary — or, for a framework binary, the
  # directory containing the .framework bundle itself.
  def self.parent_path_for_macho_binary_or_framework(binaryPath)
    parent = binaryPath.parent.realpath
    parent = parent.parent if parent.extname.downcase == ".framework"
    return parent
  end
end
class CPUArchitecture
  # Name, e.g. arm64
  attr_reader :name
  # An image with this arch can link the following archs into its address space
  attr_reader :linkable_architectures
  # A CPU of this arch can also run the following archs, in preference order
  attr_reader :runnable_architectures

  def initialize(name, linkable_architectures, runnable_architectures)
    @name = name
    @linkable_architectures = linkable_architectures
    @runnable_architectures = runnable_architectures
  end

  # The full table of known architectures (CPU_ARCHITECTURE_ALL below).
  def self.all
    CPU_ARCHITECTURE_ALL
  end

  # Looks up an architecture by name; raises for unknown names.
  def self.get(arch)
    found = all.find { |candidate| candidate.name == arch }
    found || (raise "Unknown arch #{arch}")
  end
end
# Known architectures: [name, linkable archs, runnable archs (preference order)].
CPU_ARCHITECTURE_ALL = [
  ["arm64e",   %w(arm64e),       %w(arm64e arm64 arm64_32 armv7s armv7)],
  ["arm64_32", %w(arm64_32),     %w(arm64_32)],
  ["arm64",    %w(arm64),        %w(arm64 armv7s armv7)],
  ["armv7s",   %w(armv7s armv7), %w(armv7s armv7)],
  ["armv7k",   %w(armv7k),       %w(armv7k)],
  ["armv7",    %w(armv7),        %w(armv7)],
].map { |name, linkable, runnable| CPUArchitecture.new(name, linkable, runnable) }
# Models the arg for --create-thinned and --skip-thinned
class ThinningVariantCLISpec
  # A device model identifier, product code, traits' artworkDevIdiom
  attr :device # :: String?
  attr :deploymentTarget # :: Gem::Version?

  def initialize(device, deploymentTarget)
    device.optional_as(String)
    @device = device
    deploymentTarget.optional_as(Gem::Version)
    @deploymentTarget = deploymentTarget
  end

  # Renders back to "device::deploymentTarget", using "*" for nil wildcards.
  def to_s
    deviceStr = device || "*"
    # A nil deployment target means "any"; a concrete one renders as itself.
    # (This branch used to be inverted, printing "*" for a concrete version
    # and "" for nil.)
    deploymentTargetStr = deploymentTarget ? deploymentTarget.to_s : "*"
    return [deviceStr, deploymentTargetStr].join("::")
  end

  # Parses a command-line arg: "all", "device", or "device::deploymentTarget"
  # where either component may be the wildcard "all"/"*".
  def self.fromArg(str)
    return new(nil, nil) if str == "all"
    parts = str.split('::')
    if parts.size == 1
      # A bare token is a device spec with no deployment-target constraint.
      # (Recursing on parts[0] here used to loop forever for any single
      # token other than "all".)
      device = ["all", "*"].include?(parts[0]) ? nil : parts[0]
      return new(device, nil)
    elsif parts.size == 2
      device = if ["all", "*"].include?(parts[0]) then nil else parts[0] end
      vers = if ["all", "*"].include?(parts[1]) then nil else Gem::Version.new(parts[1]) end
      return new(device, vers)
    else
      raise "couldn't parse 'device::deploymentTarget' from: #{str}"
    end
  end

  # True when this spec matches the given ThinningVariant: each non-nil
  # component must match the variant's device identifiers / target.
  def matches(thinningVariant)
    thinningVariant.assert_kind_of(ThinningVariant)
    return \
      (device.nil? || [thinningVariant.device.productType, thinningVariant.device.target, thinningVariant.traits.artworkDevIdiom].include?(device)) &&
      (deploymentTarget.nil? || deploymentTarget == thinningVariant.traits.deploymentTarget) &&
      true
  end

  def self.any_matches(specs, thinningVariant)
    return specs.any? { |s| s.matches(thinningVariant) }
  end
end
# Manages dsym/symbols outputs from bitcode recompilation. Maps input macho uuids to the dSYM and .symbols files of the recompiled output.
class SymbolsStore
  def initialize
    @dsym_paths = {}
    @symbols_paths = {}
  end

  # Registers the dSYM produced for a macho uuid; registering the same uuid
  # twice is a programming error.
  def addDSYM(uuid, path)
    # todo we should have a type for uuids
    uuid.assert_kind_of(String)
    path.assert_kind_of(Pathname)
    assert(@dsym_paths[uuid].nil?, "Duplicate dsym for #{uuid}")
    @dsym_paths[uuid] = path
    log { "SymbolsStore: Added .dSYM for #{uuid}: #{path}" }
  end

  # Registers the .symbols file produced for a macho uuid.
  def addSymbols(uuid, path)
    # todo we should have a type for uuids
    uuid.assert_kind_of(String)
    path.assert_kind_of(Pathname)
    assert(@symbols_paths[uuid].nil?, "Duplicate symbols for #{uuid}")
    @symbols_paths[uuid] = path
    log { "SymbolsStore: Added .symbols for #{uuid}: #{path}" }
  end

  # nil when no dSYM was registered for uuid.
  def getDSYM(uuid)
    @dsym_paths[uuid]
  end

  # nil when no .symbols file was registered for uuid.
  def getSymbols(uuid)
    @symbols_paths[uuid]
  end
end
# Shared state for one bitcode recompilation pass: the mapping from original
# MachO paths to their reassembled outputs, plus the SymbolsStore collecting
# dSYM/.symbols artifacts.
class BitcodeContext
  # :: [InputPath: ReassembledPath]
  attr :reassembledMachOs
  # :: SymbolsStore
  attr :symbolsStore

  def initialize
    @symbolsStore = SymbolsStore.new
    @reassembledMachOs = {}
  end

  # Records that `input` was reassembled to `output`; an input may only be
  # reassembled once.
  def addReassembledMachO(input, output)
    input.assert_kind_of(Pathname)
    output.assert_kind_of(Pathname)
    assert(@reassembledMachOs[input].nil?, "Duplicate reassembled macho: #{input} -> #{output}")
    @reassembledMachOs[input] = output
  end
end
#
# Parse Options
#

# Minimum platform versions whose OSes understand OS-variant IPAs.
OS_VARIANTS_PLATFORM_VERSION = Hash[
  [["iphoneos", 12], ["watchos", 4], ["appletvos", 12]].map { |platform, major| [platform, Gem::Version.new(major)] }
]
# Platform versions that introduced encryption format 2 (overridable via
# --set-support-encryption-format-2-platform-version).
ENCRYPTION_FORMAT_2_PLATFORM_VERSION = Hash[
  [["iphoneos", 13], ["watchos", 6], ["appletvos", 13]].map { |platform, major| [platform, Gem::Version.new(major)] }
]
# Global option bag; defaults set here, user-selected values filled in by the
# OptionParser below.
OPTIONS = OpenStruct.new()
OPTIONS.thinDevices = []
OPTIONS.skipThinDevices = []
OPTIONS.coalescingGroups = []
OPTIONS.verbosity = 1
OPTIONS.useAssetutilEql = true
OPTIONS.translateWatchOS = true
OPTIONS.xcodeVersionWithStableSwiftABI = Gem::Version.new("10.2")
OPTIONS.iOSVersionWithWatchThinning = Gem::Version.new("13")
OPTIONS.watchOSVersionRemovedPlaceholders = Gem::Version.new("6")
OPTIONS.supportEncryptionFormat2 = true
# Only for variants / deployment targets pre 6.0
# Info.plist keys permitted to survive into watchOS placeholder bundles.
$watchOSInfoPlistWhitelistEndVersion = Gem::Version.new("6")
$watchOSInfoPlistWhitelist = Set.new([
  "CFBundleDevelopmentRegion",
  "CFBundleExecutable",
  "CFBundleIcons",
  "CFBundlePrimaryIcon",
  "CFBundleIconFiles",
  "UIPrerenderedIcon",
  "CFBundleIdentifier",
  "CFBundleInfoDictionaryVersion",
  "CFBundleName",
  "CFBundleDisplayName",
  "CFBundlePackageType",
  "CFBundleShortVersionString",
  "CFBundleSignature",
  "CFBundleVersion",
  "PUICApplicationColors",
  "PUICApplicationPrimaryColor",
  "PrincipalClass",
  "UIApplicationDelegateClass",
  "UISupportedInterfaceOrientations",
  "NSUserActivityTypes",
  "UIAppFonts",
  "UILaunchImages",
  "UILaunchImageSize",
  "UILaunchImageMinimumOSVersion",
  "UILaunchImageName",
  "WKCompanionAppBundleIdentifier",
  "WKWatchKitApp",
  "BuildMachineOSBuild",
  "CFBundleInfoPlistURL",
  "CFBundleResourceSpecification",
  "CFBundleNumericVersion",
  "CFBundleSupportedPlatforms",
  "DTCompiler",
  "DTPlatformBuild",
  "DTPlatformName",
  "DTPlatformVersion",
  "DTSDKBuild",
  "DTSDKName",
  "DTXcode",
  "DTXcodeBuild",
  "MinimumOSVersion",
  "UIDeviceFamily",
  "CFBundleLocalizations",
  "UIBackgroundModes",
  "UIRequiredDeviceCapabilities",
  "INAlternativeAppNames",
])
OPTIONS.validateBitcode = true
OPTIONS.validateOutput = false
OPTIONS.validateOutputZeroVariants = nil # nil = derive from other flags after parsing (see below)
OPTIONS.bitcodeOptions = []
USAGE_BANNER = "usage: #{File.basename(__FILE__).shellescape} <ipa-or-dir> [options] [--output=<ipa-or-dir>]"
# Command-line parsing: each handler only records state into OPTIONS; all
# actual work happens after parsing.
# NOTE(review): "realtive" typo appears in two user-visible help strings
# below; left byte-identical here since help text is runtime output.
OptionParser.new do |opts|
  opts.banner = USAGE_BANNER
  opts.on("--info",
          "Prints information about the contents of the IPA") \
  do |v|
    OPTIONS.printInfo = true
  end
  opts.on("--validate",
          "Performs a variety of checks on the input IPA, reporting any errors or warnings on stderr as well as",
          "in the output JSON file (if one is requested)") \
  do |v|
    OPTIONS.validateInput = true
  end
  opts.on("--[no-]validate-bitcode") \
  do |v|
    OPTIONS.validateBitcode = v
  end
  opts.on("--[no-]compile-bitcode",
          "Compile any bitcode that's in the IPA; if there is none, a warning is emitted but it's not an error",
          "If thinning is also enabled, the Bitcode won't be in any thinned IPAs") \
  do |v|
    OPTIONS.compileBitcode = v
  end
  opts.on("--[no-]translate-watchos") \
  do |v|
    OPTIONS.translateWatchOS = v
  end
  opts.on("--[no-]support-encryption-format-2") \
  do |v|
    OPTIONS.supportEncryptionFormat2 = v
  end
  opts.on("--set-support-encryption-format-2-platform-version=PLATFORM_VERSION", String,
          "Override the encryption format 2 introduction version for a given platform by passing 'platformIdentifier::version'") \
  do |v|
    platform,version = v.split('::')
    ENCRYPTION_FORMAT_2_PLATFORM_VERSION[platform] = Gem::Version.new(version)
  end
  opts.on("--xcode-version-with-stable-swift-abi=VERSION") \
  do |v|
    # Accept 1-3 purely numeric segments (e.g. "10.2.1").
    v = Gem::Version.new(v)
    assert(v.segments.count >= 1)
    assert(v.segments.count <= 3)
    assert(v.segments.all? {|s| s.kind_of?(Integer) })
    OPTIONS.xcodeVersionWithStableSwiftABI = v
  end
  opts.on("--create-thinned=DEVICE", String,
          "Create thinned IPAs for devices with the given identifier",
          "The identifier is either a device identifier or the special name 'all' to generate all supported variants") \
  do |v|
    OPTIONS.thinDevices << ThinningVariantCLISpec.fromArg(v)
  end
  opts.on("--skip-thinned=DEVICE", String,
          "When used with --create-thinned=all, skip this device",
          "This allows --create-thinned=all to exclude certain devices",
          "It can be passed multiple times") \
  do |v|
    OPTIONS.skipThinDevices << ThinningVariantCLISpec.fromArg(v)
  end
  opts.on("--coalescing-group=DEVICE;DEVICE;DEVICE;...") \
  do |v|
    variants = v.split(';').collect { |d| ThinningVariantCLISpec.fromArg(d) }
    OPTIONS.coalescingGroups << variants
  end
  opts.on("--create-asset-packs",
          "Create asset packs from any on-demand resources",
          "The asset packs will be created in a directory next to the output",
          "If thinning is also enabled, the created asset packs will also be thinned for each set of traits",
          "If the --asset-manifest-url-prefix option is also provided, and AssetPackManifest.plist will be created") \
  do |v|
    OPTIONS.createAssetPacks = true
  end
  opts.on("--asset-manifest-url-prefix=URL",
          "URL prefix string for URLs in AssetPackManifest.plist",
          "This string will be prepended verbatim to each asset pack URL in generated AssetPackManifest.plist files",
          "Use this option to specify which server and subpath will vend the final, streamable asset packs",
          "If this option is omitted when --create-asset-packs is specified, no AssetPackManifest.plist will be created") \
  do |v|
    OPTIONS.urlPrefix = v
  end
  opts.on("--create-app-placeholders",
          "Create placeholder app bundles for each created application",
          "Placeholders just contain the Info.plist files, with additional information about ODR sizes etc") \
  do |v|
    OPTIONS.createAppPlaceholders = true
  end
  opts.on("-o", "--output=PATH", String,
          "Output path (either an non-thin IPA or directory of thin IPAs, depending on other options)",
          "When thinning, this directory will be created to contain a set of output IPAs",
          "Otherwise, this directory will be the single output IPA") \
  do |v|
    # Relative paths are anchored at the current working directory.
    OPTIONS.outputPath = Pathname(v).absolute? ? Pathname(v) : Pathname.getwd() + Pathname(v)
  end
  opts.on("-t", "--tmpdir=PATH", String,
          "Path of directory to use as temporary directory",
          "The directory will be created, if needed, but will not be removed at the end",
          "If this option isn't provided, #{File.basename(__FILE__).shellescape} will create a unique temporary directory and remove it upon exit") \
  do |v|
    OPTIONS.tmpPath = Pathname(v).absolute? ? Pathname(v) : Pathname.getwd() + Pathname(v)
  end
  opts.on("-j", "--jobs=N",
          "Specifies the number of jobs to run simultaneously during bitcode compilation") \
  do |v|
    OPTIONS.bitcodeCompilationJFactor = v.to_i
  end
  opts.on("--toolchain=DIR",
          "Specify the toolchain path, if it isn't in the standard location realtive to ipatool",
          "This is normally not needed when invoking ipatool as part of AppStoreTools, but is used when invoked by Xcode.app") \
  do |v|
    OPTIONS.toolchainDir = Pathname(v).absolute? ? Pathname(v) : Pathname.getwd() + Pathname(v)
  end
  opts.on("--platforms=DIR",
          "Specify the platforms path, if it isn't in the standard location realtive to ipatool",
          "This is normally not needed when invoking ipatool as part of AppStoreTools, but is used when invoked by Xcode.app") \
  do |v|
    OPTIONS.platformsDir = Pathname(v).absolute? ? Pathname(v) : Pathname.getwd() + Pathname(v)
  end
  opts.on("--bitcode-build-option=OPTION",
          "The additional option passed to bitcode-build-tool",
          "This is normally not needed when invoking ipatool as part of AppStoreTools, but is used for bitcode-build-tool debugging") \
  do |v|
    OPTIONS.bitcodeOptions << v
  end
  opts.on("--[no-]use-assetutil-eql") do |v|
    OPTIONS.useAssetutilEql = v
  end
  opts.on("--[no-]validate-output") do |v|
    OPTIONS.validateOutput = v
  end
  opts.on("--[no-]validate-output-zero-variants") do |v|
    OPTIONS.validateOutputZeroVariants = v
  end
  opts.on("--app-store-tools-build-version=PATH") do |v|
    OPTIONS.appStoreToolsBuildVersion = v
  end
  opts.on("--app-store-tools-build-version-plist-path=PATH") do |v|
    OPTIONS.appStoreToolsBuildVersion = appStoreToolsBuildVersionFromPlist(Pathname(v))
  end
  opts.on_tail("--json=OUTPUT",
          "Provide output in JSON format. Other programs invoking #{File.basename(__FILE__).shellescape} are expected to use this option instead",
          "of trying to parse the default format. The output is a sequence of JSON dictionaries representing operations and issues.") \
  do |v|
    begin
      # "-" means stdout.
      OPTIONS.jsonPipe = (v == "-") ? $stdout : File.open(v, 'w')
    rescue SystemCallError, IOError => e
      $stderr.puts "error: couldn't open the specified JSON file (#{e})"
      exit 1
    end
  end
  opts.on_tail("--event-log=PATH",
          "The path to the file where all event stream logging is written to") \
  do |v|
    begin
      OPTIONS.eventLogPipe = File.open(v, 'w')
      OPTIONS.eventLog = EventLogStream.new(OPTIONS.eventLogPipe)
    rescue SystemCallError, IOError => e
      $stderr.puts "error: couldn't open the specified event log path (#{e})"
      exit 1
    end
  end
  opts.on_tail("-v", "--verbose",
          "Increase the verbosity level; this option can be specified multiple times, each time making output more verbose") \
  do |v|
    OPTIONS.verbosity += 1
  end
  opts.on_tail("-q", "--quiet",
          "Makes ipatool completely silent; if this option is provided along with -v, the last one wins") \
  do |v|
    OPTIONS.verbosity = 0
  end
  opts.on_tail("--help", "Show this message") do
    $stderr.puts opts
    exit
  end
end.parse!
# Unless set explicitly, output validation with zero variants only makes
# sense when output validation is on and no devices were skipped.
if OPTIONS.validateOutputZeroVariants.nil?
  OPTIONS.validateOutputZeroVariants = OPTIONS.validateOutput && OPTIONS.skipThinDevices.empty?
end
#
# Create Temporary Directory
#
# We might have been given a path for temporary files. If not, we pick one (respecting 'TMP'), and clean it up at the end.
FS.mkdir_p(OPTIONS.tmpPath) if OPTIONS.tmpPath
# NOTE(review): the basename suffix here is "rb" without a dot — looks like
# it was meant to be ".rb"; confirm before changing, the prefix only names
# the mktmpdir directory.
OPTIONS.tmpDir = Pathname(Dir.mktmpdir(File.basename(__FILE__, "rb"), OPTIONS.tmpPath))
unless OPTIONS.tmpPath
  at_exit {
    log { "Removing temporary directory #{OPTIONS.tmpDir}..." }
    FS.rm_r(OPTIONS.tmpDir)
  }
end
# now options.tmpPath is whatever the user specified, and options.tmpDir is always a directory that exists in the file system | |
# | |
# Configure Structured Output | |
# | |
# During processing, structured output is added to the JsonOutput dictionary. | |
JsonOutput = { } | |
# Function to emit an error to stderr and also to the JSON output file, if defined. Note that in a lot of current cases we use the line number as the code. | |
# This is temporary — in the future we should either define a catalog of errors with assigned numbers, or switch to using alphanumeric identifiers. | |
$EMITTED_ERRORS = false | |
def EmitError(code, desc, type = nil, expl = nil, outputHandle = $stderr) | |
$EMITTED_ERRORS = true | |
outputHandle.puts "error: #{desc}" if outputHandle | |
JsonOutput[:alerts] = [] unless JsonOutput[:alerts] | |
JsonOutput[:alerts] << { level: "ERROR", type: type, code: code, description: desc, info: {}, explanation: expl }.reject { |k, v| v.nil? } if OPTIONS.jsonPipe | |
end | |
# Function to emit a warning to stderr and also to the JSON output file, if defined. | |
def EmitWarning(code, desc, type = nil, expl = nil, outputHandle = $stderr) | |
outputHandle.puts "warning: #{desc}" if outputHandle | |
JsonOutput[:alerts] = [] unless JsonOutput[:alerts] | |
JsonOutput[:alerts] << { level: "WARN", type: type, code: code, description: desc, info: {}, explanation: expl }.reject { |k, v| v.nil? } if OPTIONS.jsonPipe | |
end | |
# Emits an informational notice to stderr and records it in the JSON output, if defined.
def EmitInfo(code, desc, type = nil, expl = nil, outputHandle = $stderr)
  outputHandle.puts "info: #{desc}" if outputHandle
  JsonOutput[:alerts] ||= []
  if OPTIONS.jsonPipe
    alert = { level: "INFO", type: type, code: code, description: desc, info: {}, explanation: expl }
    JsonOutput[:alerts] << alert.reject { |_key, value| value.nil? }
  end
end
# Calls EmitError and exits with an error code.
# code: numeric error code (often __LINE__ at the call site); desc: message text.
# exitCode defaults to 1. exit raises SystemExit, so at_exit handlers (including
# the JSON flush registered below) still run.
def FatalError(code, desc, exitCode = 1)
EmitError(code, desc, nil)
exit(exitCode)
end
# Calls EmitError with a type of "malformed-ipa" without exiting.
# Used for problems with the IPA container itself (e.g. missing Payload directory).
def EmitIPAStructureValidationError(desc)
EmitError(0, desc, "malformed-ipa")
end
# Calls EmitError with a type of "malformed-payload" without exiting.
# Used for problems inside the app payload (bundles, plists, Mach-O files).
def EmitIPAPayloadValidationError(desc)
EmitError(0, desc, "malformed-payload")
end
# Calls EmitError with a type of "malformed-payload" and exits
# with exitCode (default 1).
def EmitFatalIPAPayloadValidationError(desc, exitCode = 1)
EmitIPAPayloadValidationError(desc)
exit(exitCode)
end
# Calls EmitError with a type of "malformed-output" without exiting.
# (The comment in the original source said "malformed-payload"; the emitted type
# is "malformed-output", for problems detected in generated output.)
def EmitIPAOutputValidationError(desc)
EmitError(0, desc, "malformed-output")
end
# Converts the plist at `path` to binary form via plutil(1) and parses the result
# into a CFPropertyList::List. When parentPathForDisplay is given, plutil is run
# chdir'd to that directory with a relative path (presumably so tool output shows
# IPA-relative names). Raises if plutil fails or the data cannot be parsed.
def LoadPlist(path, parentPathForDisplay = nil)
  relPath = path
  chdir = nil
  if parentPathForDisplay
    begin
      relPath = path.relative_path_from(parentPathForDisplay)
      chdir = parentPathForDisplay
    rescue => ex
      # Fall back to the absolute path if the relative computation fails.
      log { "warning: failed to format relative path (#{path}, #{parentPathForDisplay}): #{ex.message}" }
      relPath = path
      chdir = nil
    end
  end
  plutilArgs = ['-convert', 'binary1', '-o', '-', '--', relPath.to_s]
  binaryData = CmdSpec.new(locate_tool("plutil"), plutilArgs, nil, chdir).run(0, false, true)
  parsed = CFPropertyList::List.new
  parsed.load_str(binaryData, CFPropertyList::List::FORMAT_BINARY)
  return parsed
end
# Loads a plist and categorizes parse errors as malformed-payload: any failure is
# logged and converted into a fatal payload-validation error (process exits).
def LoadUserPlist(path, parentPathForDisplay = nil)
  LoadPlist(path, parentPathForDisplay)
rescue => ex
  $stderr.puts "exception: #{ex.to_log_s}"
  EmitFatalIPAPayloadValidationError("Failed to read property list: #{ex.message}")
end
# Reads the ProductBuildVersion string (e.g. "10A208") from a version.plist.
def appStoreToolsBuildVersionFromPlist(path)
  plist = LoadPlist(path)
  nativeDict = CFPropertyList::native_types(plist.value)
  nativeDict["ProductBuildVersion"].as(String)
end
# Pretty-printing settings for the JSON output stream.
JsonOutputFormatting = { indent: ' ', space: ' ', object_nl: "\n", array_nl: "\n", max_nesting: false }
# Registered only when --json output was requested: on process exit, record any
# uncaught exception (other than SystemExit) as an ERROR alert, then flush the
# accumulated JsonOutput dictionary to the JSON pipe.
at_exit {
if $! && !$!.kind_of?(SystemExit)
EmitError(__LINE__, "ipatool failed with an exception: #{$!.to_log_s}", "exception", nil, nil)
end
OPTIONS.jsonPipe.puts JSON.generate(JsonOutput, JsonOutputFormatting)
} if OPTIONS.jsonPipe
#
# Check Arguments
#
# Check that we were given exactly one path: the IPA to operate on (either flattened or expanded).
if ARGV.count == 0
$stderr.puts USAGE_BANNER
JsonOutput[:alerts] = [] unless JsonOutput[:alerts]
JsonOutput[:alerts] << { level: "ERROR", code: -1, description: "error: invalid usage: no arguments provided", info: {}}.reject { |k, v| v.nil? } if OPTIONS.jsonPipe
exit 1
elsif ARGV.count == 1
OPTIONS.inputPath = Pathname(ARGV[0])
# At least one action (print-info, validate, bitcode compile, thinning, or asset-pack creation) must have been requested.
if not (OPTIONS.printInfo or OPTIONS.validateInput or OPTIONS.compileBitcode or !OPTIONS.thinDevices.empty? or OPTIONS.createAssetPacks)
$stderr.puts "error: no action specified"
$stderr.puts USAGE_BANNER
JsonOutput[:alerts] = [] unless JsonOutput[:alerts]
JsonOutput[:alerts] << { level: "ERROR", code: -1, description: "error: invalid usage: no action specified", info: {}}.reject { |k, v| v.nil? } if OPTIONS.jsonPipe
exit 1
end
else
# More than one positional argument is not supported.
$stderr.puts "error: too many input files specified"
$stderr.puts USAGE_BANNER
JsonOutput[:alerts] = [] unless JsonOutput[:alerts]
JsonOutput[:alerts] << { level: "ERROR", code: -1, description: "error: invalid usage: too many input files specified", info: {}}.reject { |k, v| v.nil? } if OPTIONS.jsonPipe
exit 1
end
# If we weren't given a toolchain directory, we try to locate one.
unless OPTIONS.toolchainDir
# On the AppStoreTools disk image, the toolchain directory is the parent of the directory that contains ipatool.
OPTIONS.toolchainDir = Pathname(__dir__).parent
end
# Make sure we did end up with a valid toolchain directory.
unless OPTIONS.toolchainDir.directory?
FatalError(__LINE__, "toolchain directory #{OPTIONS.toolchainDir} #{OPTIONS.toolchainDir.exist? ? "isn't a directory" : "doesn't exist"}")
end
# Warn if the user specified an asset pack prefix URL that will never get used because asset packs aren't being created.
if OPTIONS.urlPrefix and not OPTIONS.createAssetPacks
EmitWarning(__LINE__, "asset pack creation not requested, so --asset-manifest-url-prefix is ignored")
end
# If we weren't given a platform directory, we try to locate one.
unless OPTIONS.platformsDir
# On the AppStoreTools disk image, the platforms directory is off of the parent directory of the toolchains directory.
OPTIONS.platformsDir = OPTIONS.toolchainDir.parent + "Platforms"
end
# Make sure we did end up with a valid platforms directory.
unless OPTIONS.platformsDir.directory?
FatalError(__LINE__, "platforms directory #{OPTIONS.platformsDir} #{OPTIONS.platformsDir.exist? ? "isn't a directory" : "doesn't exist"}")
end
# Prepend the tools directory to the command line search path.
# Later locate_tool/CmdSpec invocations pick up toolchain binaries via PATH.
ENV["PATH"] = "#{OPTIONS.toolchainDir}/bin" + ":" + ENV["PATH"]
# Check that we were given an output directory if we're asked to compile bitcode and/or create thinned outputs.
if (OPTIONS.compileBitcode or !OPTIONS.thinDevices.empty? or OPTIONS.createAssetPacks) and not OPTIONS.outputPath
actions = []
actions << "bitcode compilation" if OPTIONS.compileBitcode
actions << "device-specific thinning" if !OPTIONS.thinDevices.empty?
actions << "asset-pack creation" if OPTIONS.createAssetPacks
FatalError(__LINE__, "#{actions.join(" and ")} requested, but no --output argument provided")
end
# If the build version wasn't supplied on the command line, read ProductBuildVersion
# from a version.plist found at one of two expected locations relative to this script.
if OPTIONS.appStoreToolsBuildVersion == nil
candidates = [
File.dirname(__FILE__) + "/../../version.plist",
File.dirname(__FILE__) + "/../../../version.plist",
]
path = candidates.detect { |p| File.exist?(p) }
FatalError(__LINE__, "Could not find version.plist. Expected at #{candidates}. To override, pass a version in --app-store-tools-build-version or a path to a version.plist in --app-store-tools-build-version-plist-path.") if path.nil?
log { "Loading DTAppStoreToolsBuild via ProductBuildVersion from #{path}" }
OPTIONS.appStoreToolsBuildVersion = appStoreToolsBuildVersionFromPlist(Pathname(path))
end
# Build versions look like "10A208" or "10A208a": digits, non-digits, digits, optional trailing non-digits.
APP_STORE_TOOLS_BUILD_VERSION_PATTERN = /^\d+\D+\d+\D*$/
unless OPTIONS.appStoreToolsBuildVersion =~ APP_STORE_TOOLS_BUILD_VERSION_PATTERN
FatalError(__LINE__, "Expected #{OPTIONS.appStoreToolsBuildVersion} to match #{APP_STORE_TOOLS_BUILD_VERSION_PATTERN}, e.g. 10A208 or 10A208a")
end
#
# Unpack Flattened IPA (If Needed)
#
# The input path may be either a file or a directory; if it's a file, we unpack it into our temporary directory.
OPTIONS.ipaName = OPTIONS.inputPath.basename
if OPTIONS.inputPath.file?
# Use 'ditto -x -k' to unpack the IPA (it's really just a Zip file, for now); later we'll use the StreamingZip tool.
log { "Unpacking #{OPTIONS.inputPath.basename.to_s.shellescape}..." }
unpackedDir = OPTIONS.tmpDir.join("ipa")
CmdSpec.new("ditto", ["-x", "-k", "--noqtn", "--noacl", OPTIONS.inputPath, unpackedDir]).run(0, false)
# From here on, inputPath always refers to an expanded IPA directory tree.
OPTIONS.inputPath = unpackedDir
end
log { "IPA contents:" }
log { CmdSpec.new(locate_tool("find"), ["-s", "-f", OPTIONS.inputPath]).run(1, false, false) }
# | |
# Data Model | |
# | |
# Represents a single Mach-O image, backed by either a thin file or a slice of a fat file. | |
class MachOImage
attr :type # Type of file, e.g. MH_EXECUTE, MH_DYLIB, etc
attr :platformIdentifier # Platform (macosx, iphoneos, etc)
attr :arch # Architecture, e.g. x86_64 or arm64
attr :uuid # UUID, from the UUID load command
attr :hasBitcode # True if the Mach-O image contains bitcode
attr :hasExecCode # True if the Mach-O image contains executable code
attr :isSigned # True if the Mach-O image is signed
attr_accessor :machoFile # The Mach-O file of which this image is a part (an FSMachOFileNode object)
attr :loadedDylibs # Dylib install names referenced by this image (see dylibNames below)
attr :rpaths # Runtime search paths recorded in the image
def initialize(type, platformIdentifier, arch, uuid, hasBitcode, hasExecCode, isSigned, loadedDylibs, rpaths)
@type = type
@platformIdentifier = platformIdentifier
@arch = arch
@uuid = uuid
@hasBitcode = hasBitcode
@hasExecCode = hasExecCode
@isSigned = isSigned
@loadedDylibs = loadedDylibs
@rpaths = rpaths
@machoFile = nil # this will be set later, when we're attached to an FSMachOFileNode
end
# Platform object resolved from the platform identifier string.
def platform
return Platform.platformForIdentifer(self.platformIdentifier)
end
# We need to override to_s and inspect because MachOImage has a pointer back to its MachOFile and that cycle breaks the default inspect.
def to_s
return "<MachOImage #{self.machoFile.path} #{self.arch}>"
end
def inspect
return to_s
end
def thinToPath(dstPath, level)
assert(!dstPath.exist?, "Duplicate output: #{dstPath.to_s}")
# Creates a thin Mach-O file at the given path, taking the contents from the Mach-O image that backs this object.
# If the backing is already thin, we can just copy the file; otherwise we need to use lipo(1) to extract the arch
# slice into a new thin file.
FS.mkdir_p(dstPath.parent)
unless machoFile.isFat
# We already have a thin file (either from the start, or because it has already been thinned earlier).
FS.cp(self.machoFile.path, dstPath)
else
# We don't have a thin file, so we use lipo(1) to extract the slice from our Mach-O file.
CmdSpec.new(locate_tool("lipo", [self.platform.toolsPath]), ["-thin", arch, machoFile.path, "-output", dstPath]).run(level, true)
end
end
def isDylib
return self.type == 'MH_DYLIB'
end
# Where are we storing the thinned version of this image before we process it (strip or recompile bitcode)?
def thinnedInputPath(ipa, options)
target_arch = arch
return self.machoFile.path.reparent(ipa.path, options.tmpDir + "thinned-in" + target_arch)
end
# Where are we storing the thinned version of this image after we've processed it (stripped or recompiled bitcode)?
# todo different paths for recompiling or not?
def thinnedOutputPath(ipa, options, target_arch = arch)
return self.machoFile.path.reparent(ipa.path, options.tmpDir + "thinned-out" + target_arch)
end
# Basenames of the loaded dylibs (install-name paths with directories stripped). Memoized.
def dylibNames
return @dylibNames ||= self.loadedDylibs.map{|l| l.sub(/^.*\/([^\/]*)$/, '\1') }
end
# Should we exclude this image from the output IPA?
# An image is excluded when the bitcode-compilation root Mach-O of the enclosing
# bundle has no image for this architecture. Memoized.
def shouldExcludeFromOutput
if not defined? @shouldExcludeFromOutput
@shouldExcludeFromOutput = lambda {
return !self.machoFile.enclosingBundle.bitcodeCompilationRootMachO.machoImages.any?{ |img|
img.arch == self.arch
}
}.call
end
return @shouldExcludeFromOutput
end
# True when bitcode recompilation was requested, this image carries bitcode, and
# the root Mach-O also has bitcode for this arch (so the whole chain can be recompiled).
def shouldCompileBitcode(options)
return (options.compileBitcode &&
self.hasBitcode &&
!self.shouldExcludeFromOutput &&
self.machoFile.enclosingBundle.bitcodeCompilationRootMachO.machoImages.any? { |img|
img.hasBitcode && img.arch == self.arch
})
end
# True when the bitcode segment should be stripped instead: either recompilation
# wasn't requested, or the root Mach-O lacks bitcode for this arch.
def shouldStripBitcode(options)
return (self.hasBitcode &&
!self.shouldExcludeFromOutput &&
(!options.compileBitcode ||
self.machoFile.enclosingBundle.bitcodeCompilationRootMachO.machoImages.any?{ |img|
!img.hasBitcode && img.arch == self.arch
}))
end
# Copies the dSYM and/or .symbols files matching this image's UUID from the
# symbols store into the output IPA's dSYMs/ and Symbols/ directories.
def copySymbolsToIPA(symbolsStore, outputIPAPath)
dsymFile = symbolsStore.getDSYM(uuid)
if dsymFile
dst = outputIPAPath + "dSYMs"
FS.mkdir_p(dst)
FS.cp_r(dsymFile, dst + dsymFile.basename)
end
symbolsFile = symbolsStore.getSymbols(uuid)
if symbolsFile
dst = outputIPAPath + "Symbols"
FS.mkdir_p(dst)
FS.cp_r(symbolsFile, dst + symbolsFile.basename)
end
end
end
class Pathname
  # /a/foo reparent from /a to /b => /b/foo
  #
  # BUG FIX: relative_path_from never returns nil — it raises ArgumentError when no
  # relative path exists (e.g. mixing absolute and relative paths) — so the original
  # `|| (raise ...)` branch was unreachable and its diagnostic never fired. Rescue
  # the ArgumentError instead so the intended "not a parent" message is reported.
  def reparent(old_parent, new_parent)
    begin
      relpath = self.relative_path_from(old_parent)
    rescue ArgumentError
      raise "#{old_parent} is not a parent of #{self}"
    end
    return new_parent + relpath
  end
end
# Represents a single file system entity inside the (expanded) IPA tree.
class FSNode
  attr_accessor :path # Pathname for the file in the file system (can be relocated in special cases)
  attr :enclosingBundle # Reference to the bundle in which we're nested, or nil if we're the top
  def initialize(path, enclosingBundle)
    @path = path.kind_of?(Pathname) ? path : Pathname(path)
    @enclosingBundle = enclosingBundle
  end
  # Last path component, as a String.
  def name
    path.basename.to_s
  end
  # Leaf nodes have no children; directory subclasses override this.
  def subnodes
    nil
  end
end
class FSFileNode < FSNode
  # Copies this file verbatim to dstPath, records the copy in the thinning
  # context's IO map, and propagates the source file's permission bits.
  def copyToPath(dstPath, thinningContext, level = 0)
    log { "#{" "*level}copy #{name.shellescape}" }
    assert(!dstPath.exist?, "Duplicate output: #{dstPath.to_s}")
    FS.cp(path, dstPath)
    thinningContext.recordIO(path, dstPath)
    sourceMode = path.stat().mode
    dstPath.lchmod(sourceMode)
  end
end
class FSSymlinkNode < FSNode
  # Recreates this symlink at dstPath (pointing at the same target), records the
  # copy in the thinning context's IO map, and propagates the link's mode bits.
  def copyToPath(dstPath, thinningContext, level = 0)
    log { "#{" "*level}slnk #{name.shellescape}" }
    assert(!dstPath.exist?, "Duplicate output: #{dstPath.to_s}")
    linkTarget = path.readlink()
    dstPath.make_symlink(linkTarget)
    thinningContext.recordIO(path, dstPath)
    dstPath.lchmod(path.stat().mode)
  end
end
class FSDirectoryNode < FSNode
  attr :subnodes # Nodes representing the directory contents
  def initialize(path, enclosingBundle)
    super(path, enclosingBundle)
    @subnodes = []
  end
  # Recreates this directory at dstPath, then recursively copies each child into it.
  def copyToPath(dstPath, thinningContext, level = 0)
    log { "#{" "*level}mkdr #{name.shellescape}/" }
    assert(!dstPath.exist?, "Duplicate output: #{dstPath.to_s}")
    FS.mkdir_p(dstPath)
    thinningContext.recordIO(path, dstPath)
    subnodes.each do |child|
      child.copyToPath(dstPath + child.name, thinningContext, level + 1)
    end
    # Propagate the source directory's permission bits to the copy.
    dstPath.lchmod(path.stat().mode)
  end
  # Yields self and every descendant model object, depth-first. Returns an
  # Enumerator when called without a block.
  def enumerateTree(&visitor)
    return enum_for(:enumerateTree) unless visitor
    visitor.call(self)
    subnodes.each do |child|
      if child.respond_to?(:enumerateTree)
        child.enumerateTree(&visitor)
      else
        visitor.call(child)
      end
    end
  end
end
class FSMachOFileNode < FSFileNode
# Array of Mach-O images contained in the file (note that even if one, it could be a fat file)
attr_accessor :machoImages
# MH_EXECUTE, MH_DYLIB, etc (technically an image property, but all should be the same)
attr :type
def initialize(path, enclosingBundle, machoImages)
super(path, enclosingBundle)
assert(machoImages.size > 0)
# Link each image back to this file node (each's return value is the array itself).
@machoImages = machoImages.each{ |img|img.machoFile = self }
@type = machoImages.first.type
assert(machoImages.all? { |i| i.type == type }, "Mismatched types in MachO images: #{machoImages}")
end
# Platform of the file, taken from its first image.
def platform
return self.machoImages[0].platform
end
# True if this is a universal (multi-architecture) file, as reported by lipo.
def isFat
_,isUniversal = lipo_archs(path)
return isUniversal
end
# Given #arch, pick the best image to link against. Returns nil if we can't link any image in this MachO against #arch.
def linkableImageForArch(arch)
# Find the best Mach-O image to use, which matches the earliest linkable architecture.
return CPUArchitecture.get(arch).linkable_architectures.map { |li|
machoImages.find { |mi| mi.arch == li }
}.compact.first
end
# Like linkableImageForArch, but emits a fatal payload-validation error (exits) instead of returning nil.
def linkableImageForArchOrError(arch)
machoImage = self.linkableImageForArch(arch)
unless machoImage
EmitFatalIPAPayloadValidationError("couldn't find suitable architecture for linking #{arch} in Mach-O file #{path.to_s.shellescape}")
end
return machoImage
end
# Thinning eligibility is inherited from the enclosing bundle.
def shouldThin
return self.enclosingBundle.shouldThin
end
# Copies this Mach-O into the output tree. Depending on context this may: skip the
# file entirely (embedded Swift runtime dylib on targets that have their own),
# substitute a bitcode-reassembled binary, thin to a single architecture, or fall
# back to a verbatim file copy.
def copyToPath(dstPath, thinningContext, level = 0)
thinArch = nil
if thinningContext.traits && self.shouldThin
# Target ships its own Swift runtime, so embedded libswift*.dylib files are dropped.
if thinningContext.traits.hasSwiftRuntime && self.isSwiftRuntimeDylib
return
end
thinArch = thinningContext.traits.preferredArch
elsif self.platform.identifier == "watchos" && thinningContext.mainTraits && !thinningContext.mainTraits.supportsEmbeddedUniversalWatchApp
# Hosts that can't take a universal watch app get the armv7k slice only.
thinArch = "armv7k"
end
thinningContext.recordIO(path, dstPath)
# If bitcode recompilation produced a reassembled replacement for this file, copy that instead.
reassembledPath = thinningContext.bitcodeContext.reassembledMachOs[path]
unless reassembledPath.nil?
newImages = GetMachOImagesFromOToolInfoForFile(reassembledPath)
src = FSMachOFileNode.new(reassembledPath, enclosingBundle, newImages)
# Pass down a dummy IO map
subThinningContext = ThinningContext.new(thinningContext.traits, thinningContext.mainTraits, thinningContext.variants, thinningContext.outputIPAPath, thinningContext.bitcodeContext, nil)
src.copyToPath(dstPath, subThinningContext, level)
return
end
if thinArch
log { "#{" "*level}thin #{name.shellescape}" }
machoImage = self.linkableImageForArchOrError(thinArch)
# Ask the Mach-O image to emit a thin binary to the destination path.
machoImage.thinToPath(dstPath, level)
machoImage.copySymbolsToIPA(thinningContext.bitcodeContext.symbolsStore, thinningContext.outputIPAPath)
# Make the mode of the destination the same as the source.
dstPath.lchmod(path.stat().mode)
else
# Not thinning: copy symbols for every image, then fall back to a plain file copy.
for machoImage in machoImages
machoImage.copySymbolsToIPA(thinningContext.bitcodeContext.symbolsStore, thinningContext.outputIPAPath)
end
super(dstPath, thinningContext, level)
end
end
def isDylib
return machoImages.first.isDylib
end
# True for dylibs whose name matches the embedded Swift runtime naming (libswift*.dylib).
def isSwiftRuntimeDylib
return isDylib && self.name =~ /libswift.*\.dylib/
end
# After we've recompiled each image, where do we store the universal reassembled MachO?
def universalReassemblyPath(ipa, options)
return self.path.reparent(ipa.path, options.tmpDir + "universal-MachOs")
end
end
class FSAssetCatalogFileNode < FSFileNode
  # Asset catalogs are thinnable unless they belong to WatchKit 1 content
  # (stub app or app extension), which is excluded from thinning.
  def shouldThin
    return self.enclosingBundle.shouldThin && !self.enclosingBundle.isWatchKit1AppExtension && !self.enclosingBundle.isWatchKit1StubApp
  end
  # Copies the asset catalog to dstPath. When thinning traits are present (and the
  # catalog is thinnable), runs assetutil to produce a device-specific catalog;
  # otherwise falls back to a verbatim file copy.
  def copyToPath(dstPath, thinningContext, level = 0)
    if thinningContext.traits && self.shouldThin
      log { "#{" "*level}asct #{name.shellescape}" }
      assert(!dstPath.exist?, "Duplicate output: #{dstPath.to_s}")
      CmdSpec.new(locate_tool("assetutil", [self.enclosingBundle.platform.toolsPath]),
        (thinningContext.traits.to_assetutil_args_array +
          [
            "--output=#{dstPath}",
            path,
          ])
      ).run(level)
      thinningContext.recordIO(path, dstPath)
      # Make the mode of the destination the same as the source.
      # BUG FIX: the original used the modifier form `... rescue Errno::ENOENT`,
      # which rescues *every* StandardError and merely evaluates the constant
      # Errno::ENOENT as the expression's result. Rescue only ENOENT — presumably
      # covering the case where no output file was produced (TODO confirm) — and
      # let any other error propagate.
      begin
        dstPath.lchmod(path.stat().mode)
      rescue Errno::ENOENT
      end
    else
      super(dstPath, thinningContext, level)
    end
  end
  # Would traits a and b produce the same output when they thin this AssetCatalog? A return value of true is conclusive, whereas false is not. It would be too expensive to compute a conclusive no.
  def eql_for_traits?(a, b, level = 0)
    a_arg = a.to_assetutil_T_string
    b_arg = b.to_assetutil_T_string
    return true if a_arg == b_arg
    # Sort args so that we'll share cache hits for A/B and B/A
    traits_args = [a_arg, b_arg].sort.join('/')
    output = CmdSpec.new(locate_tool("assetutil", [self.enclosingBundle.platform.toolsPath]),
      [
        "-T",
        "#{traits_args}",
        self.path,
      ]
    ).run(level, true, true)
    return false if output.start_with?("not equal ")
    if output.start_with?("equal ")
      return true
    end
    raise "Unknown assetutil -T output: #{output}"
  end
end
class FSBundleDirectoryNode < FSDirectoryNode
attr :infoDict # Info.plist contents of the bundle (always a dictionary)
# NOTE(review): this reader returns @requiredDeviceCapabilities, which is never
# assigned — the initializer stores into @requiredDevCaps instead — so it always
# returns nil. Internal code reads @requiredDevCaps directly. Confirm intent.
attr :requiredDeviceCapabilities # Set of required device capabilities
# This class has multi-stage initialization. Anything which uses the below is not safe until initialized is true.
attr_accessor :initialized
attr :machoFiles # Any Mach-O files in the bundle (but not in nested bundles)
attr_accessor :mainMachoFile # Main executable Mach-O file, nil if none (always included in machoFiles)
attr :nestedBundles # Any nested bundles (but not any nested bundles inside those bundles)
def initialize(path, enclosingBundle, infoDict)
super(path, enclosingBundle)
@infoDict = infoDict
# The following properties will be populated by the logic that creates us.
@machoFiles = []
@mainMachoFile = nil
@nestedBundles = []
# Examine UIRequiredDeviceCapabilities
@requiredDevCaps = infoDict["UIRequiredDeviceCapabilities"] || {}
# The plist may use either array or dictionary form; normalize to a dictionary of cap => true.
if @requiredDevCaps.kind_of?(Array)
dict = {}
for cap in @requiredDevCaps
dict[cap] = true
end
@requiredDevCaps = dict
end
end
def self.getInfoPlistIfPathIsBundle(path, parentPathForDisplay = nil)
# Check if the given path seems like a bundle, and if so, return its Info.plist contents.
# A bundle here means: a directory containing an Info.plist with a CFBundleIdentifier.
return nil unless path.directory?
infoPlistPath = path + "Info.plist"
return nil unless infoPlistPath.file?
infoPlist = LoadUserPlist(infoPlistPath, parentPathForDisplay)
nativeInfoPlist = CFPropertyList::native_types(infoPlist.value) unless infoPlist == nil
return nil unless nativeInfoPlist && nativeInfoPlist.kind_of?(Hash) && nativeInfoPlist["CFBundleIdentifier"]
return nativeInfoPlist
end
# The bundle identifier (CFBundleIdentifier) from Info.plist.
def identifier()
return infoDict["CFBundleIdentifier"]
end
# Idioms (phone/pad/...) this bundle supports, derived from UIDeviceFamily, falling
# back to the enclosing bundle when unset and on the same platform. Memoized.
def supportedIdioms
if not defined? @supportedIdioms
@supportedIdioms = lambda {
family = infoDict["UIDeviceFamily"]
if family.nil_if_empty.nil?
if self.enclosingBundle && self.platformIdentifier == self.enclosingBundle.platformIdentifier
return self.enclosingBundle.supportedIdioms
else
return nil
end
end
return ThinningTraits.assetutil_idioms_for_UIDeviceFamily(family)
}.call
end
return @supportedIdioms
end
# Platform identifier as stated by CFBundleSupportedPlatforms (lowercased), with
# inheritance from the enclosing bundle and the WatchKit 1 special case.
def statedPlatformIdentifier
supportedPlatformIdents = infoDict["CFBundleSupportedPlatforms"]
if self.enclosingBundle && supportedPlatformIdents.nil_if_empty.nil?
return self.enclosingBundle.statedPlatformIdentifier
end
if !supportedPlatformIdents.kind_of?(Array) || supportedPlatformIdents.count != 1
EmitFatalIPAPayloadValidationError("#{path.basename.to_s.shellescape}/Info.plist should specify CFBundleSupportedPlatforms with an array containing a single platform, e.g. CFBundleSupportedPlatforms = [ iPhoneOS ], but it has CFBundleSupportedPlatforms = #{supportedPlatformIdents}")
end
platformIdent = supportedPlatformIdents.first.downcase
# WatchKit 1 app bundle
platformIdent = "watchos" if platformIdent == "iphoneos" and isWatchKit1StubApp
return platformIdent
end
# Effective platform identifier: plain resource bundles (no Mach-Os, no nested
# bundles) take their enclosing bundle's platform.
def platformIdentifier
platformIdent = statedPlatformIdentifier
# Plain resources bundle
assert(initialized, path.to_s) # assert that we've finalized mutable fields (machoFiles and nestedBundles)
platformIdent = self.enclosingBundle.platformIdentifier if self.enclosingBundle && self.machoFiles.empty? && self.nestedBundles.empty?
return platformIdent
end
def platform
return Platform.platformForIdentifer(self.platformIdentifier)
end
# Gem::Version
def deploymentTarget
return @deploymentTarget ||= Gem::Version.new(infoDict["MinimumOSVersion"])
end
# Gem::Version
# Decodes DTXcode (e.g. 1020 => 10.2.0) into a Gem::Version. Memoized.
def builtWithXcodeVersion
return @builtWithXcodeVersion if defined? @builtWithXcodeVersion
v = infoDict["DTXcode"].to_i
major = v / 100
minor = (v % 100) / 10
update = v % 10
@builtWithXcodeVersion = Gem::Version.new("#{major}.#{minor}.#{update}")
return @builtWithXcodeVersion
end
# Architectures common to every Mach-O file in this bundle (set intersection). Memoized.
def supportedArchitectures()
assert(initialized, path.to_s)
return @supportedArchs ||= machoFiles.map{ |file| file.machoImages.map{ |mimg| mimg.arch } }.reduce(:&) || []
end
# True when this bundle is a Spotlight index app extension.
def isSpotlightAppExtBundle()
extnDict = infoDict["NSExtension"]
return false unless extnDict
return extnDict["NSExtensionPointIdentifier"] == "com.apple.spotlight.index"
end
def infoPropertyListPath()
return path + "Info.plist"
end
# The top-level bundle is always thinnable; nested bundles only when their
# platform matches the enclosing bundle's.
def shouldThin
return !enclosingBundle || (enclosingBundle.shouldThin && platformIdentifier == enclosingBundle.platformIdentifier)
end
def copyToPath(dstPath, thinningContext, level = 0)
assert(!dstPath.exist?, "Duplicate output: #{dstPath.to_s}")
log { "#{" "*level}bndl #{name.shellescape} (#{platformIdentifier})" }
# If our platform is distinct from the native platform of the parent bundle, we disable the thinning traits for
# this subtree. This is because in that case, this is actually a payload intended to be sent to another device
# at a later time, and thinning it would be the wrong thing to do.
thinningContext = thinningContext.subscope(nil) if !self.shouldThin
# Modify the thinning traits to account for iPhone-only apps in compatibility mode on iPads.
if thinningContext.traits and thinningContext.traits.artworkDevIdiom == "pad" and self.supportedIdioms == ["phone"]
# It's an iPhone-only app being thinned for iPad. We do some magic on the thinning traits to reflect the compatibility environment.
traits = ThinningTraits.new(thinningContext.traits.deploymentTarget, thinningContext.traits.passDeploymentTargetToAssetutil, thinningContext.traits.preferredArch, "phone", thinningContext.traits.artworkHostedIdioms, 2, 0, thinningContext.traits.artworkDisplayGamut, thinningContext.traits.artworkDynamicDisplayMode, thinningContext.traits.devPerfMemoryClass,thinningContext.traits.gfxFeatureSetClass, thinningContext.traits.gfxFeatureSetFallbacks, thinningContext.traits.featuresToRemove, thinningContext.traits.supportsEmbeddedWatchApp, thinningContext.traits.supportsEmbeddedUniversalWatchApp, thinningContext.traits.hasSwiftRuntime, thinningContext.traits.supportsEncryptionFormat2, thinningContext.traits.coalescingGroup).freeze
thinningContext = ThinningContext.new(traits, traits, thinningContext.variants, thinningContext.outputIPAPath, thinningContext.bitcodeContext, thinningContext.inputOutputMap)
log { "#{" "*level} (adjusting thinning traits to #{thinningContext.traits} for an iPhone-only app on an iPad)" }
end
# If we are a Spotlight extension and the thinning traits say that we should strip it out, we skip the copying.
if self.isSpotlightAppExtBundle() and thinningContext.traits and thinningContext.traits.featuresToRemove.include?("spotlight") then
log { "#{" "*level} (skipping copying of a Spotlight Extension on this device)" }
skipCopying = true
end
# Skip embedded watch content when the destination traits say it isn't supported;
# a WatchKit 2 stub app is replaced by a placeholder in that case.
if self.platform.identifier == "watchos" \
&& thinningContext.mainTraits && !thinningContext.mainTraits.supportsEmbeddedWatchApp \
&& self.enclosingBundle && self.enclosingBundle.platform.identifier == "iphoneos" \
&& (self.isWatchKit2StubApp || self.isWatchKit1AppExtension)
log { "#{" "*level} (skipping copying of a bundle because thinning traits don't support embedded watch content)" }
skipCopying = true
if self.isWatchKit2StubApp
makeWatchPlaceholderForiOS(self.enclosingBundle, dstPath.parent.parent)
end
end
# Skip bundles whose required architecture can't run on the thinning target.
if thinningContext.traits && thinningContext.traits.preferredArch
runnableArchNames = CPUArchitecture.get(thinningContext.traits.preferredArch).runnable_architectures
bundleRequiredArch = CPUArchitecture.all.find{|cpuArch| @requiredDevCaps[cpuArch.name] == true }
if bundleRequiredArch && !runnableArchNames.include?(bundleRequiredArch.name)
log { "#{" "*level} (skipping copying of a bundle due to incompatible architecture (thinning for runnable #{thinningContext.traits.preferredArch}, bundle requires #{bundleRequiredArch.name}))" }
skipCopying = true
end
end
# Unless we've decided to skip this bundle completely, we copy it now (while possibly thinning).
unless skipCopying
# Create the top-level directory and then recurse downward.
FS.mkdir_p(dstPath)
thinningContext.recordIO(path, dstPath)
# Copy the file contents to the destination path (possibly applying thinning).
for subnode in subnodes
subnode.copyToPath(dstPath + subnode.name, thinningContext, level + 1)
end
# Make the mode of the destination the same as the source.
dstPath.lchmod(path.stat().mode)
end
end
# Every thinnable asset catalog anywhere under this bundle.
def thinnableAssetCatalogs
return self.enumerateTree.select{ |node| node.kind_of?(FSAssetCatalogFileNode) && node.shouldThin }
end
end
# WatchKit extras
class FSBundleDirectoryNode
  # True for a ".app" bundle whose Info.plist marks it as a WatchKit app (WKWatchKitApp).
  def isWatchKitStubApp
    path.extname == ".app" && plistBoolValue(infoDict["WKWatchKitApp"] || false)
  end
  # True for a ".appex" bundle whose NSExtension point is com.apple.watchkit.
  def isWatchKitAppExtension
    return false unless path.extname == ".appex"
    extensionInfo = self.infoDict["NSExtension"]
    extensionInfo.kind_of?(Hash) && extensionInfo["NSExtensionPointIdentifier"] == "com.apple.watchkit"
  end
  # WatchKit 1 stub apps are nested inside a WatchKit app extension.
  def isWatchKit1StubApp
    isWatchKitStubApp && enclosingBundle && enclosingBundle.isWatchKitAppExtension
  end
  def isWatchKit2StubApp
    isWatchKitStubApp && !isWatchKit1StubApp
  end
  # WatchKit 1 app extensions live directly inside a non-stub (iOS) app.
  def isWatchKit1AppExtension
    isWatchKitAppExtension && enclosingBundle && !enclosingBundle.isWatchKitStubApp
  end
  # NOTE(review): this tests !isWatchKit1StubApp, which is always true for an
  # ".appex" bundle — possibly !isWatchKit1AppExtension was intended. Behavior
  # preserved as-is; confirm the intent before changing.
  def isWatchKit2AppExtension
    isWatchKitAppExtension && !isWatchKit1StubApp
  end
  attr_accessor :watchKit2ContaineriOSApp
end
# Bitcode recompilation extras
class FSBundleDirectoryNode
  # Memoized: the nested WatchKit stub app bundle, if any.
  def watchAppChild
    assert(initialized, path.to_s)
    unless defined? @watchAppChild
      @watchAppChild = nestedBundles.detect { |b| b.isWatchKitStubApp }
    end
    @watchAppChild
  end
  # Memoized: for a WatchKit stub app, its nested WatchKit app extension; nil otherwise.
  def watchAppExChild
    assert(initialized, path.to_s)
    unless defined? @watchAppExChild
      @watchAppExChild = self.isWatchKitStubApp ? self.nestedBundles.detect { |b| b.isWatchKitAppExtension } : nil
    end
    @watchAppExChild
  end
  # This is usually the main app in an IPA, but iOS apps which contain watchOS apps will have two roots for
  # bitcode compilation (one for iOS, one for watchOS). Bitcode compilation happens in linkage dependency
  # order starting from a tree of bundles; dependency resolution cannot cross platform boundaries, so a
  # bundle with a new platform becomes a new root. Memoized.
  def isBitcodeCompilationRootBundle
    unless defined? @isBitcodeCompilationRootBundle
      @isBitcodeCompilationRootBundle = (!self.enclosingBundle ||
        self.platformIdentifier != self.enclosingBundle.platformIdentifier ||
        self.watchAppExChild != nil)
    end
    @isBitcodeCompilationRootBundle
  end
  # Walks up the bundle tree to the nearest root (see isBitcodeCompilationRootBundle). E.g. for an iOS
  # appex this returns the parent iOS app. Memoized.
  def bitcodeCompilationRootBundle
    unless defined? @bitcodeCompilationRootBundle
      @bitcodeCompilationRootBundle = self.isBitcodeCompilationRootBundle ? self : self.enclosingBundle.bitcodeCompilationRootBundle
    end
    @bitcodeCompilationRootBundle
  end
  # The root Mach-O for bitcode purposes. Not necessarily the root bundle's mainMachoFile: a watchOS app's
  # mainMachoFile is only a stub, so the "root" Mach-O is really the user's watch app extension. Memoized.
  def bitcodeCompilationRootMachO
    assert(initialized, path.to_s)
    unless defined? @bitcodeCompilationRootMachO
      root = bitcodeCompilationRootBundle
      actualRoot = root.watchAppExChild || root
      @bitcodeCompilationRootMachO = actualRoot.mainMachoFile
    end
    @bitcodeCompilationRootMachO
  end
end
# An IPA object represents a directory tree containing the expanded contents of a .ipa file. The IPA's path is the | |
# top-level directory. The mainBundle property is the FSBundleDirectoryNode object that represents the main bundle | |
# underneath the IPA's Payload directory (if any). | |
class IPA
attr :path # Pathname for the file in the file system
attr :mainBundle # Reference to the main bundle inside the payload directory
attr :linkageGraph # LinkageGraph for all the MachOImage objects
attr :vpnPlugins # Optional vpnplugin bundles inside the payload directory
attr :assetPacks # Array of asset packs in the OnDemandResources directory, if any
# Builds the model for an expanded IPA directory.
# path: directory containing the expanded IPA (must have a Payload subdirectory).
# appPath: optional explicit path to the main .app; when nil, the single .app
# under Payload is discovered (zero or multiple apps is a structure error).
# NOTE(review): the EmitIPA*ValidationError helpers are assumed to abort or
# record a fatal issue — subsequent lines proceed as if each check passed.
def initialize(path, appPath = nil)
# Scan the file system, constructing the node tree for the app payload.
path = Pathname(path) unless path.kind_of?(Pathname)
@path = path
EmitIPAStructureValidationError("IPA has no Payload directory") unless payloadPath.exist?
if appPath.nil?
# Discover the main app: exactly one top-level .app is expected under Payload.
appPaths = payloadPath.children.sort.select{ |chld| chld.extname == ".app" }
appNames = appPaths.collect {|p| p.basename }
EmitIPAStructureValidationError("IPA has no main app") if appPaths.count == 0
EmitIPAStructureValidationError("IPA has multiple top-level apps: #{appNames}") if appPaths.count > 1
appPath = appPaths.first
end
@mainBundle = MakeFileSystemNode(appPath, nil, nil, payloadPath)
EmitIPAStructureValidationError("Couldn't load app at #{appPath}") unless mainBundle
EmitIPAStructureValidationError("Couldn't load app at #{appPath}, found: #{mainBundle.class.name}: #{mainBundle}") unless mainBundle.kind_of?(FSBundleDirectoryNode)
# Collect every Mach-O file node in the app's tree for linkage resolution.
machOs = mainBundle.enumerateTree.select{|n| n.kind_of?(FSMachOFileNode) }
# Cache [real path -> MachO] for use below
machOsByRealPath = Hash[machOs.map{|m| [m.path.realpath, m] }]
# Setup fallback rpaths for otherwise unresolved linkage. This ensures that we maintain previous behavior (link by name) if we can't resolve @rpaths. This can happen if the app includes a dylib that the executable does not link (maybe it's dlopen'ed instead).
rpathFallbackStack = machOs.map{|m| LinkageGraph.parent_path_for_macho_binary_or_framework(m.path) }.uniq
@linkageGraph = LinkageGraph.new
# Resolve dylibs, starting with executables so that we can get a concrete @executable_path.
machOs.
# Partition by executable so that they go first, then flatten so that we process executables and dylibs as one collection.
partition{|m| m.type == "MH_EXECUTE" }.flatten.
each{|m|
m.machoImages.each {|img|
self.linkageGraph.resolveLoadedDylibs(img, rpathFallbackStack, [], machOsByRealPath)
}
}
log { "Complete LinkageGraph:\n#{self.linkageGraph.to_s(mainBundle.path.parent).indent}" }
# VPN plugins sit as siblings of the main app under Payload (subclasses may opt out).
if supportsVPNPlugins
vpnPluginPaths = payloadPath.children.sort.select{ |chld| chld.extname == ".vpnplugin" }
@vpnPlugins = vpnPluginPaths.map{|p| MakeFileSystemNode(p, nil, nil, payloadPath) }
else
@vpnPlugins = []
end
# On-demand-resource asset packs, if the subclass supports them and any exist.
# Note: asset packs are parented to the main bundle, unlike VPN plugins.
if supportsAssetPacks && assetPacksPath.exist?
assetPackPaths = assetPacksPath.children.sort.select{ |chld| chld.extname == ".assetpack" }
@assetPacks = assetPackPaths.collect{ |p| MakeFileSystemNode(p, nil, mainBundle, assetPacksPath) }
else
@assetPacks = []
end
end
# The Payload directory inside the expanded IPA.
def payloadPath()
return @path + "Payload"
end
# Where on-demand-resource asset packs live inside the IPA.
def assetPacksPath()
return @path + "Payload" + "OnDemandResources"
end
# Top-level Symbols directory (symbol files produced by bitcode compilation).
def symbolCachePath()
return @path + "Symbols"
end
# Top-level BCSymbolMaps directory (bitcode symbol maps).
def bitcodeSymbolMapsPath
return @path + "BCSymbolMaps"
end
# For subclasses to configure behavior
def supportsAssetPacks
return true
end
def hasAssetPacks
return !@assetPacks.empty?
end
def hasSymbolCache()
return symbolCachePath.exist?
end
def hasBitcodeSymbolMaps
bitcodeSymbolMapsPath.exist?
end
# For subclasses to configure behavior
def supportsVPNPlugins
return true
end
# Asset catalogs eligible for thinning, from the main bundle plus all asset packs.
def thinnableAssetCatalogs
mainBundleAssetCatalogs = self.mainBundle.thinnableAssetCatalogs
assetPackAssetCatalogs = (self.assetPacks||[]).map{|ap| ap.thinnableAssetCatalogs }.flatten
return mainBundleAssetCatalogs + assetPackAssetCatalogs
end
# Name of the per-IPA thinning-options plist placed at the IPA root.
def appThinningInfoBasename
return "AppThinning.plist"
end
def appThinningInfoPath
return path + appThinningInfoBasename
end
# Path relative to the IPA root, for user-facing messages.
def appThinningInfoDisplayPath
return appThinningInfoPath.relative_path_from(path)
end
# Parsed AppThinning.plist contents as a Hash; empty Hash when the file is
# absent. Fatal validation error if the plist's root is not a dictionary.
# (Memoized via defined? so a nil/empty result is not recomputed.)
def appThinningInfo
if not defined? @appThinningInfo
@appThinningInfo = lambda {
return Hash.new unless appThinningInfoPath.exist?
result = CFPropertyList::native_types(LoadUserPlist(appThinningInfoPath, path).value)
unless result.kind_of?(Hash)
EmitFatalIPAPayloadValidationError("Expected dictionary in: #{appThinningInfoDisplayPath.to_s.shellescape}")
end
return result
}.call
end
return @appThinningInfo
end
# Whether Swift symbols should be stripped, per AppThinning.plist's
# StripSwiftSymbols key. Defaults to true when the key is missing; fatal
# validation error when the value is not a recognizable boolean. (Memoized.)
def stripSwiftSymbols
if not defined? @stripSwiftSymbols
@stripSwiftSymbols = lambda {
keyName = "StripSwiftSymbols"
value = appThinningInfo[keyName]
bvalue = if value.nil? then true else plistBoolValue(value) end
if bvalue.nil?
EmitFatalIPAPayloadValidationError("Unexpected value for key #{keyName} in #{appThinningInfoDisplayPath.to_s.shellescape}: #{value}")
end
return bvalue
}.call
end
return @stripSwiftSymbols
end
end
# An IPA wrapper for the watchOS app embedded inside an iOS app's IPA.
# Watch IPAs carry neither asset packs nor VPN plugins, so both are disabled.
class EmbeddedWatchIPA < IPA
  def initialize(path, watchAppChild = nil)
    # Bare `super` forwards both arguments unchanged to IPA#initialize.
    super
  end
  def supportsAssetPacks
    false
  end
  def supportsVPNPlugins
    false
  end
end
# Interprets a plist-style boolean value.
# Returns true for the truthy spellings ("1", "YES", 1, true), false for the
# falsy spellings ("0", "NO", 0, false), and nil for anything unrecognized.
def plistBoolValue(value)
  return true if ["1", "YES", 1, true].include?(value)
  return false if ["0", "NO", 0, false].include?(value)
  nil
end
# Runs `lipo -info` on the given path and returns the command's stdout.
def lipo_info(path)
  cmd = CmdSpec.new(locate_tool("lipo"), ["-info", path.to_s])
  cmd.run(0, false, true)
end
# Returns: [String],isUniversal | |
def lipo_archs(path) | |
output = lipo_info(path) | |
if output =~ /Architectures in the fat file.*\:(.+)/m | |
return $1.split, true | |
elsif output =~ /Non-fat file.*\:(.+)/m | |
return [$1.strip], false | |
else | |
raise "Could not parse architectures from lipo:\n#{output}" | |
end | |
end | |
# Returns `file(1)`'s brief (-b) description of the path's contents, stripped.
def file_type(path)
  description = CmdSpec.new(locate_tool("file"), ["-b", "--", path.to_s]).run(0, false, true)
  description.strip
end
OTOOL_PATH = locate_tool('otool-classic')
# Runs otool-classic on the path and returns its stdout.
# Options passed:
#   -h: display the Mach header (llvm-otool implies this with -l; otool-classic does not).
#   -l: display the load commands.
#   -v: display verbosely (symbolically) when possible.
#   -m: treat the argument as a literal file name rather than archive(member)
#       syntax, so paths containing parentheses work.
def otoolLVM(path)
  args = ["-h", "-l", "-v", "-m", path.to_s]
  CmdSpec.new(OTOOL_PATH, args).run(0, false, true)
end
$placeholderutil = locate_tool("placeholderutil")
# Builds the watchOS placeholder for a combo (iOS + watch) app, using the
# watch app's own platform assetutil, writing results into outputDir.
def makeWatchPlaceholderForiOS(comboApp, outputDir)
  assetutil = locate_tool("assetutil", [comboApp.watchAppChild.platform.toolsPath])
  args = ["--watch", assetutil.to_s, comboApp.path.to_s, outputDir.to_s]
  CmdSpec.new($placeholderutil, args).run(1, true, false)
end
# Builds the iOS placeholder for a watch app, using the combo app's own
# platform assetutil, writing results at outputIPAPath.
def makeiOSPlaceholderForWatch(comboApp, outputIPAPath)
  assetutil = locate_tool("assetutil", [comboApp.platform.toolsPath])
  args = ["--ios", assetutil.to_s, comboApp.path.to_s, outputIPAPath.to_s]
  CmdSpec.new($placeholderutil, args).run(1, true, false)
end
# :: [[String: ...]] | |
def otoolParseLoadCommands(output) | |
loadCommands = [] | |
output.split(/Load command \d+\s*\n/).drop(1).each { |substr| | |
# Iterate over the lines in the load command. If we find a section, we deal with it. | |
next if substr == "" | |
currentLoadCommand = {} | |
currentSection = nil | |
loadCommands << currentLoadCommand | |
substr.each_line { |line| | |
# If the line is "Section", we create a new section. | |
if line.strip == "Section" | |
# It's the start of a new section. | |
(currentLoadCommand[:sections] ||= []) << (currentSection = {}) | |
else | |
# It's a key value pair, which we add to either the load command or the current section. | |
# todo: keys like "current version" and "time stamp" have spaces in them | |
k,v = line.split(' ', 2) | |
(currentSection || currentLoadCommand)[k.strip.to_sym] = v.strip | |
end | |
} | |
} | |
return loadCommands | |
end | |
# Examines the file system entity at the path, and returns either a [MachOImage] (if the path refers to
# a Mach-O file or a symlink to a Mach-O file) or nil (if it doesn't). One MachOImage is produced per
# architecture slice, so thin (single-architecture) and fat (multi-architecture) files are represented
# uniformly. Note: despite earlier comments, this returns an array of MachOImage objects or nil, not a string.
# Runs otool over the file at `path` and builds one MachOImage per architecture
# slice. Returns nil when the file is not an object file at all; otherwise an
# array of MachOImage (one per slice).
# path: Pathname of the candidate Mach-O file.
def GetMachOImagesFromOToolInfoForFile(path)
  path.assert_kind_of(Pathname)
  obj = otoolLVM(path)
  return nil if obj =~ /\A.*: is not an object file/
  # Each slice gets its own 'Mach header' stanza; a thin file has exactly one.
  objspl = obj.split('Mach header')
  assert(objspl.count > 1, "Couldn't split otool output on 'Mach header' for #{path}")
  # If it's a universal binary, this is the fat header. If it isn't, it's just the input path.
  obj_fat_header = objspl[0]
  obj_slices = objspl.drop(1)
  # Bug fix: the scan block's parameter is `gs`, but the original body read an
  # undefined local `groups`, raising NameError for any universal binary.
  archs = obj_fat_header.scan(/^architecture ([^ ]+)$/).collect { |gs| gs[0] }
  if archs.empty?
    # If it's a universal binary, otool will tell us the archs. If it isn't universal, we need lipo.
    archs, _ = lipo_archs(path)
  end
  assert(obj_slices.count == archs.count, "Expected #{archs.count} archs in otool output:\n#{obj}")
  # Go through the architectures, and construct a Mach-O image for each.
  machoImages = []
  for arch, output in archs.zip(obj_slices)
    # Filetype column of the Mach header; "???" when the header can't be parsed.
    machOType = /[0-9]+\s+(EXECUTE|DYLIB|BUNDLE)\s+[0-9]+/.match(output)[1] rescue "???"
    loadedDylibs = output.
      scan(/cmd LC(_LAZY)?_LOAD(_WEAK)?_DYLIB.*?name\s+(.*?)\s+\(offset\s+\d+\)/m).
      map{|groups| groups[2] }
    rpaths = output.
      scan(/cmd LC_RPATH.*?path\s+(.*?)\s+\(offset \d+\)/m).
      map{|groups| groups[0] }
    # Collect load commands.
    loadCommands = otoolParseLoadCommands(output)
    # Record the UUID, if there is one.
    uuid = loadCommands.select{ |lc| lc[:cmd] == "LC_UUID" }.collect{ |lc| lc[:uuid] }.first
    # Record the platform and the minimum deployment target.
    versionMinLoadCommandName = loadCommands.select{ |lc| lc[:cmd] =~ /LC_VERSION_MIN_(.+)/ }.collect{ |lc| lc[:cmd] }.first
    platformIdentifier = if versionMinLoadCommandName then Platform.platformIdentifierForVersionMinLoadCommand(versionMinLoadCommandName) else nil end
    if platformIdentifier.nil?
      # Newer binaries use LC_BUILD_VERSION instead of LC_VERSION_MIN_*.
      versionLoadCommand = loadCommands.detect{ |lc| lc[:cmd] =~ /LC_BUILD_VERSION/ }
      # llvm-otool emits this field in lower case, while otool-classic emits it in upper case.
      platformIdentifier = versionLoadCommand[:platform].downcase if versionLoadCommand
      platformIdentifier = "iphoneos" if platformIdentifier == "ios"
      platformIdentifier = "appletvos" if platformIdentifier == "tvos"
    end
    EmitWarning(__LINE__, "Could not determine platform for #{arch} in #{path}") unless platformIdentifier
    # Record whether or not there is a code signature.
    hasCodeSignature = loadCommands.select{ |lc| lc[:cmd] == "LC_CODE_SIGNATURE" }.count > 0
    # Record whether or not there is bitcode. (Comment fix: the original's
    # hasBitcode/hasExecCode comments were swapped.)
    hasBitcode = loadCommands.select{ |lc| lc[:cmd] == "LC_SEGMENT" or lc[:cmd] == "LC_SEGMENT_64" }.find{ |lc|
      # Select segments named "__LLVM" with sections named "__bundle".
      lc[:segname] == "__LLVM" and lc[:sections] and lc[:sections].find{ |sc| sc[:sectname] == "__bundle"}
    }
    # Record whether or not there is executable code.
    hasExecCode = loadCommands.select{ |lc| lc[:cmd] == "LC_SEGMENT" or lc[:cmd] == "LC_SEGMENT_64" }.select{ |lc|
      # Select "__TEXT" segments that aren't zero-sized.
      lc[:segname] == "__TEXT" and lc[:filesize].to_i > 0
    }.count > 0
    # Finally, create a MachOImage object to represent the Mach-O image.
    machoImages << MachOImage.new("MH_" + machOType, platformIdentifier, arch, uuid, hasBitcode, hasExecCode, hasCodeSignature, loadedDylibs, rpaths)
  end
  return machoImages
end
# Examines the file system entity at the path and returns the most specific kind of node that best represents it.
# path: Pathname (or string) to examine; must exist (raises otherwise).
# parentDirectory: NOTE(review): accepted and passed on recursion as the new node,
#   but never read inside this function's body — possibly vestigial; confirm.
# enclosingBundle: nearest enclosing FSBundleDirectoryNode, or nil at the root.
# parentPathForDisplay: base path used to shorten paths in user-facing errors.
def MakeFileSystemNode(path, parentDirectory = nil, enclosingBundle = nil, parentPathForDisplay = nil)
# Make a quick exit if the path doesn't even exist.
path = Pathname(path) unless path.kind_of?(Pathname)
raise "Nothing at path: #{path}" unless path.exist?
# Otherwise, what we do depends on the type of file system entity.
case path.ftype
when "file"
# Check the file name suffix as a first indication of what kind of file it might be.
case path.extname.downcase
when ".car"
magic_type = file_type(path)
is_bom = magic_type == "Mac OS X bill of materials (BOM) file"
if is_bom
# If the file type is BOM, then slice accordingly, regardless of the file name (as long as it ends in .car).
node = FSAssetCatalogFileNode.new(path, enclosingBundle)
# NOTE(review): if a non-BOM *.car ever appears outside any bundle, enclosingBundle
# is nil on the next line and .path raises — confirm whether that can occur.
elsif path.basename.to_s.downcase == "assets.car" && path.parent == enclosingBundle.path
# If the file name is exactly Assets.car and the file type is not BOM, then error (especially if our platforms on devices assume that Assets.car is a BOM file). This treats Assets.car as a “reserved” file name.
displayPath = if parentPathForDisplay then path.relative_path_from(parentPathForDisplay) else path end
EmitFatalIPAPayloadValidationError("Assets.car should be an asset catalog, but is \"#{magic_type}\": #{displayPath}")
else
# If the file name is not Assets.car and the file type is not BOM, then skip (i.e., the resource.car case).
node = FSFileNode.new(path, enclosingBundle)
end
else
# Possibly a Mach-O (we'll need to check the contents to know for sure)
is_macho = (file_type(path) =~ /Mach-O/) rescue nil
machoImages = if is_macho then GetMachOImagesFromOToolInfoForFile(path) else nil end
# Only Mach-Os matching the enclosing bundle's stated platform become Mach-O
# nodes; mismatched-platform binaries fall through to plain file nodes.
if machoImages && !machoImages.empty? && (!enclosingBundle || machoImages.first.platformIdentifier == enclosingBundle.statedPlatformIdentifier)
# Instantiate a Mach-O File Node and, if there is an enclosing bundle, add it to the list.
node = FSMachOFileNode.new(path, enclosingBundle, machoImages)
enclosingBundle.machoFiles << node if enclosingBundle
else
# Otherwise we consider it a regular File Node.
node = FSFileNode.new(path, enclosingBundle)
end
end
return node
when "directory"
# Check if this seems to be a bundle; if so, we get back the Info.plist contents.
infoDict = FSBundleDirectoryNode.getInfoPlistIfPathIsBundle(path, parentPathForDisplay)
if infoDict
# It's a bundle. We instantiate a node, connect it to any enclosing bundle, and set it as the current one.
node = FSBundleDirectoryNode.new(path, enclosingBundle, infoDict)
enclosingBundle.nestedBundles << node if enclosingBundle
bundle = node
else
# Not a bundle, so just create a regular directory node.
node = FSDirectoryNode.new(path, enclosingBundle)
bundle = enclosingBundle
end
# Descend the directory hierarchy.
for cpath in path.children.sort
subnode = MakeFileSystemNode(cpath, node, bundle, parentPathForDisplay)
node.subnodes << subnode if subnode
end
# For a bundle directory, we do some post-processing after reading the subtree.
if node.kind_of?(FSBundleDirectoryNode)
# Look up the CFBundleExecutable key in the Info dictionary.
mainExecName = node.infoDict['CFBundleExecutable']
# If there is a value, we find it in the list of Mach-Os, and set it as the bundle's main Mach-O.
# (Falls back to a Unicode-normalization-insensitive name comparison.)
# Here `bundle` is the same object as `node` (set above when infoDict was found).
if mainExecName
bundle.mainMachoFile = node.subnodes.detect{ |n| n.name == mainExecName } || node.subnodes.detect{ |n| n.name.unicode_equal?(mainExecName) }
end
node.initialized = true
end
# Return the directory node (or specialization of directory node, e.g. bundle node).
return node
when "link"
# Create a symbolic link node.
return FSSymlinkNode.new(path, enclosingBundle)
else
# Any other kind of entity is treated plainly (should we also warn?).
return FSNode.new(path, enclosingBundle)
end
end
# Logs the bundle tree for debugging: each bundle's identifier and supported
# platforms, its Mach-O files and their per-architecture images, then recurses
# into nested bundles one indentation level deeper.
def WalkBundle(bundle, level = 0)
  indent = " " * level
  log { "#{indent}[#{bundle}] #{bundle.infoDict["CFBundleIdentifier"]} #{bundle.infoDict["CFBundleSupportedPlatforms"]}" }
  bundle.machoFiles.each { |machoFile|
    log { "#{indent} #{machoFile}" }
    machoFile.machoImages.each { |machoImage|
      log { "#{indent} #{machoImage.type} #{machoImage.arch} #{machoImage.uuid}" }
    }
  }
  bundle.nestedBundles.each { |nestedBundle| WalkBundle(nestedBundle, level + 1) }
end
# Logs every file-system node in the tree, indented by depth, for debugging.
def WalkFiles(node, level = 0)
  log { "#{" " * level}#{node.class} #{node}" }
  (node.subnodes || []).each { |subnode| WalkFiles(subnode, level + 1) }
end
# Validate an unpacked IPA, emitting issues to the JSON.
def ValidateIPA(ipa)
  log { "Validating IPA structure..." }
  # Recursively validate the main bundle and everything nested inside it.
  mainBundle = ipa.mainBundle
  ValidatePayloadBundle(mainBundle, ipa)
  # The main bundle must declare a parseable MinimumOSVersion.
  deploymentTarget = mainBundle.deploymentTarget
  unless deploymentTarget && deploymentTarget.to_s != "" && Gem::Version.correct?(deploymentTarget.to_s)
    bundleDisplayPath = mainBundle.path.relative_path_from(ipa.payloadPath)
    EmitIPAPayloadValidationError("Failed to parse MinimumOSVersion from “#{bundleDisplayPath}/Info.plist”")
  end
end
# Validate a bundle in the payload of an unpacked IPA, emitting issues to the JSON.
# bundle: the FSBundleDirectoryNode to check; recurses into its nested bundles.
# ipa: the owning IPA, used to compute display paths relative to Payload/.
# level: recursion depth. NOTE(review): it is also forwarded as the first
# argument to CmdSpec#run below, whereas other call sites pass 0 or 1 — confirm
# that parameter's meaning (verbosity/indent?) before relying on it.
def ValidatePayloadBundle(bundle, ipa, level = 0)
bundleDisplayPath = bundle.path.relative_path_from(ipa.payloadPath)
# Check the specific bundle we're given.
EmitIPAPayloadValidationError("Bundle “#{bundleDisplayPath}” does not specify a platform identifier in its Info.plist") unless bundle.platformIdentifier.nil_if_empty
EmitIPAPayloadValidationError("Info.plist of “#{bundleDisplayPath}” specifies a simulator platform for the CFBundleSupportedPlatforms key") if bundle.platformIdentifier.end_with?("simulator")
platform = Platform.platformForIdentifer(bundle.platformIdentifier)
EmitIPAPayloadValidationError("Bundle “#{bundleDisplayPath}” specifies an unknown platform in its Info.plist") unless platform
# An iPhoneOS-only WatchKit extension must not also claim WKWatchKitApp=1.
EmitIPAPayloadValidationError("Info.plist of “#{bundleDisplayPath}” specifies WKWatchKitApp=1 but it is an iOS WatchKit extension") if
bundle.infoDict['CFBundleSupportedPlatforms'] == ["iPhoneOS"] &&
bundle.infoDict["WKWatchKitApp"] == true &&
bundle.infoDict["NSExtension"].kind_of?(Hash) &&
bundle.infoDict["NSExtension"]["NSExtensionPointIdentifier"] == "com.apple.watchkit"
EmitIPAPayloadValidationError("Info.plist of “#{bundleDisplayPath}” should specify UIDeviceFamily with an array containing one or more entries") if bundle.supportedIdioms.nil_if_empty.nil?
if bundle.isBitcodeCompilationRootBundle
# It's a main bundle for its platform. As such, we expect it to have a main Mach-O.
mainMachoFile = bundle.bitcodeCompilationRootMachO
if mainMachoFile.nil?
EmitIPAPayloadValidationError("main bundle #{bundle.path.relative_path_from(ipa.payloadPath)} doesn't have a main Mach-O file")
elsif !mainMachoFile.kind_of?(FSMachOFileNode)
log { "Main bundle '#{bundle.path.to_s}' : '#{bundle.platform.identifier}' main mach-o '#{mainMachoFile.path.to_s}' is of type '#{mainMachoFile.class.name}'" }
EmitIPAPayloadValidationError("main bundle #{bundle.path.relative_path_from(ipa.payloadPath)} specifies a main Mach-O file ('#{mainMachoFile.name}') that isn't an actual Mach-O")
end
end
# Standalone bitcode verification only runs when we're validating bitcode but
# not actually recompiling it (recompilation would surface problems itself).
if OPTIONS.validateBitcode && !OPTIONS.compileBitcode
if platform
for machoFile in bundle.machoFiles
if machoFile.machoImages.any?{|mi| mi.hasBitcode }
begin
# bitcode-build-tool --verify checks that embedded bitcode is rebuildable.
CmdSpec.new(locate_tool("python3"),
[
locate_tool("bitcode-build-tool"),
"-t", (OPTIONS.toolchainDir + "bin").to_s,
"--sdk", platform.sdkPath.to_s,
'--verify', machoFile.path.to_s,
]).run(level)
rescue CmdSpec::NonZeroExitException => ex
# Exit status 1 means verification failed; anything else is unexpected
# and is re-raised.
if ex.status.exitstatus == 1
EmitIPAPayloadValidationError("Failed to verify bitcode in #{machoFile.path.relative_path_from(machoFile.path.parent.parent)}:\nStdout:\n#{ex.out_str}\nStderr:\n#{ex.err_str}")
else
raise
end
end
end
end
end
end
# Recurse through any nested bundles (including, possibly, those of other platforms).
for subbundle in bundle.nestedBundles
ValidatePayloadBundle(subbundle, ipa, level + 1)
end
end
# Validates post-processing invariants of an output IPA against its source:
# AppThinning.plist removed, MinimumOSVersion well-formed, older-watchOS
# Info.plist restricted to a whitelist, DTAppStoreToolsBuild inserted and
# well-formed, no residual bitcode, and (when bitcode was compiled) dSYM and
# symbol files present for every recompiled image.
# srcIPA/outputIPA: IPA objects; options: parsed tool options (compileBitcode,
# translateWatchOS are read here).
def ValidateOutputIPA(srcIPA, outputIPA, options)
# There should be no AppThinning plist
if outputIPA.appThinningInfoPath.exist?
EmitIPAOutputValidationError("Should have been removed: #{outputIPA.appThinningInfoPath.to_s.shellescape}")
end
# If we've changed the deployment target, it should have 2+ components for binary compatibility
srcDeploymentTargetString = srcIPA.mainBundle.infoDict["MinimumOSVersion"]
deploymentTargetString = outputIPA.mainBundle.infoDict["MinimumOSVersion"]
if srcDeploymentTargetString != deploymentTargetString && (deploymentTargetString.nil? || !deploymentTargetString.kind_of?(String) || deploymentTargetString.split('.').count < 2)
EmitIPAOutputValidationError("Expected a MinimumOSVersion with 2+ dot-separated components but got '#{deploymentTargetString}' in #{outputIPA.mainBundle.path.relative_path_from(outputIPA.path).to_s}")
end
deploymentTarget = Gem::Version.new(deploymentTargetString)
expectDTAppStoreToolsBuild = true
# Check Info.plist whitelist for older watchOS
# NOTE(review): $watchOSInfoPlistWhitelistEndVersion / $watchOSInfoPlistWhitelist
# are globals defined elsewhere in this file (not visible in this chunk).
if outputIPA.mainBundle.platform.identifier == "watchos" && deploymentTarget < $watchOSInfoPlistWhitelistEndVersion
# Whitelisted watchOS Info.plists must not carry DTAppStoreToolsBuild either.
expectDTAppStoreToolsBuild = false
infoKeys = Set.new(outputIPA.mainBundle.infoDict.keys)
infoKeys.subtract($watchOSInfoPlistWhitelist)
if infoKeys.count != 0
EmitIPAOutputValidationError("Expected Info.plist within watchOS whitelist but found the following extraneous keys: #{infoKeys}")
end
end
# We should insert DTAppStoreToolsBuild
if expectDTAppStoreToolsBuild
appStoreToolsBuildVersion = outputIPA.mainBundle.infoDict["DTAppStoreToolsBuild"]
if appStoreToolsBuildVersion == nil
EmitIPAOutputValidationError("Expected Info.plist:DTAppStoreToolsBuild in #{outputIPA.mainBundle.path.relative_path_from(outputIPA.path).to_s}")
else
unless appStoreToolsBuildVersion =~ APP_STORE_TOOLS_BUILD_VERSION_PATTERN
EmitIPAOutputValidationError("Expected #{appStoreToolsBuildVersion} to match #{APP_STORE_TOOLS_BUILD_VERSION_PATTERN}, e.g. 10A208 or 10A208a")
end
end
end
# We should have no remaining bitcode
for machO in outputIPA.mainBundle.enumerateTree.select{|n| n.kind_of?(FSMachOFileNode) && n.machoImages.any?{|i| i.hasBitcode } }
imgs = machO.machoImages.collect { |i| "#{i.arch}:#{i.uuid}" }.sort.join(" ")
EmitIPAOutputValidationError("Found bitcode in #{machO.path} #{imgs}")
end
# If we compiled bitcode, we should have symbols
if options.compileBitcode
dSYMsDir = outputIPA.path + "dSYMs"
dSYMs = if Dir.exist?(dSYMsDir) then Set.new(Dir.entries(dSYMsDir)) else Set.new() end
symbolsDir = outputIPA.path + "Symbols"
symbols = if Dir.exist?(symbolsDir) then Set.new(Dir.entries(symbolsDir)) else Set.new() end
srcMachOs = srcIPA.mainBundle.enumerateTree.select{|n| n.kind_of?(FSMachOFileNode) }
# Pair each output Mach-O with its source Mach-O by bundle-relative path, and
# each output image with its source image by arch.
for machO in outputIPA.mainBundle.enumerateTree.select{|n| n.kind_of?(FSMachOFileNode) }
correspondingInputPath = machO.path.relative_path_from(outputIPA.mainBundle.path)
correspondingInputMachO = srcMachOs.detect{|n| n.path.relative_path_from(srcIPA.mainBundle.path) == correspondingInputPath }
assert(correspondingInputMachO, "Could not find macho node with path: #{correspondingInputPath} in #{srcMachOs.map{|n| n.path.relative_path_from(srcIPA.path).to_s }}")
for image in machO.machoImages
correspondingInputImage = correspondingInputMachO.machoImages.detect{|i| i.arch == image.arch }
# watchOS translation turns armv7k slices into arm64_32, so map across archs.
if correspondingInputImage.nil? && options.translateWatchOS && image.arch == "arm64_32"
correspondingInputImage = correspondingInputMachO.machoImages.detect{|i| i.arch == "armv7k" }
end
assert(correspondingInputImage, "Couldn't find image for #{image.arch} among #{correspondingInputMachO.machoImages.map{|i|i.arch}}")
# Each recompiled image must have a UUID-named dSYM and symbols file.
if correspondingInputImage.shouldCompileBitcode(options)
EmitIPAOutputValidationError("Could not find dSYM for: #{machO.path} : #{image.arch} : #{image.uuid}") unless dSYMs.include?(image.uuid + ".dSYM")
EmitIPAOutputValidationError("Could not find symbols for: #{machO.path} : #{image.arch} : #{image.uuid}") unless symbols.include?(image.uuid + ".symbols")
end
end
end
end
end
# Given a directory of *.assetpack bundles, writes an AssetPackManifest.plist
# describing each pack (download URL, bundle identifier, streamability).
# assetPackDirPath: directory to scan; assetPackManifestFilePath: output plist
# path; urlPrefix: prepended to each pack's basename to form its URL;
# parentPathForDisplay: base path for shortening paths in error messages.
def CreateAssetPackManifest(assetPackDirPath, assetPackManifestFilePath, urlPrefix, parentPathForDisplay = nil)
  taskId = EmitEventLogStartTrace("Create Asset Pack Manifest", { :assetPackPath => assetPackDirPath, :assetPackManifestPath => assetPackManifestFilePath })
  # One manifest entry per asset pack that has a loadable Info.plist.
  assetPackManifestDicts = assetPackDirPath.children.sort.
    select { |child| child.extname == ".assetpack" }.
    map { |assetPackPath|
      infoPlist = LoadUserPlist(assetPackDirPath + assetPackPath.basename + "Info.plist", parentPathForDisplay)
      nativeInfoPlist = infoPlist.nil? ? nil : CFPropertyList::native_types(infoPlist.value)
      # Packs without a readable Info.plist are silently skipped.
      next nil unless nativeInfoPlist
      {
        URL: urlPrefix + assetPackPath.basename.to_s,
        bundleKey: nativeInfoPlist["CFBundleIdentifier"] || "",
        isStreamable: true,
      }
    }.compact
  # Emit the AssetPackManifest.plist as XML.
  outputAssetPackManifestPlist = CFPropertyList::List.new
  outputAssetPackManifestPlist.value = CFPropertyList.guess({ "resources" => assetPackManifestDicts })
  outputAssetPackManifestPlist.save(assetPackManifestFilePath, CFPropertyList::List::FORMAT_XML)
  EmitEventLogStopTrace(taskId, "Create Asset Pack Manifest")
end
# Creates an output IPA. If thinning traits are provided, the output is thinned; otherwise, it's copied as-is. Asset | |
# packs can optionally be extracted, and if an URL prefix is provided, an AssetPackManifest.plist can be created. This | |
# function returns a structure of information describing the created outputs. This function doesn't deal with bitcode | |
# in any way; it assumes that bitcode has already been compiled, if needed. The destination path is first removed if | |
# it exists, and any ancestor directories are created, if needed. | |
def CreateOutputIPA(srcIPA, cleanSrcIPA, dstPath, thinningContext, extractAssetPacks = false, assetPackManifestURLPrefix = nil, createPlaceholderBundle = false)
  # Create a result struct to contain the output describing the outputs. It always contains at least the path.
  outputInfo = OpenStruct.new
  outputInfo.path = dstPath
  # Human-readable descriptions of the traits and variants, used only for logging and event tracing.
  traitsUsed = thinningContext.traits ? thinningContext.traits.to_s : "universal"
  variantsUsed = thinningContext.variants.nil_if_empty ? thinningContext.variants.collect{|x|x.to_s}.sort.join(", ") : "all variants"
  log { "Creating #{traitsUsed} IPA contents for #{variantsUsed}" }
  taskId = EmitEventLogStartTrace("Create Output IPA", { :ipa => srcIPA.path, :destination => dstPath, :traits => traitsUsed, :variants => variantsUsed })
  # Create the output directory, and any ancestor directories.
  assert(!dstPath.exist?, "Duplicate output: #{dstPath.to_s}")
  log { " Creating directory #{dstPath.to_s.shellescape}" }
  FS.mkdir_p(dstPath)
  # Copy the main payload bundle. Whether or not it will be thinned as it is being copied depends on whether or not
  # we were given any thinning traits.
  log { " #{thinningContext.traits ? "Thinning" : "Copying"} #{srcIPA.mainBundle.name} output payload directory" }
  srcIPA.mainBundle.copyToPath(dstPath + "Payload" + srcIPA.mainBundle.name, thinningContext, 2)
  srcIPA.vpnPlugins.each{ |vpnPlugin|
    # traits is intentionally nil. vpnPlugins are signed and we cannot break their signature.
    vpnPlugin.copyToPath(dstPath + "Payload" + vpnPlugin.name, ThinningContext.new(nil, nil, nil, dstPath, thinningContext.bitcodeContext, thinningContext.inputOutputMap), 2)
  }
  # Update the Info.plist of the main payload bundle (in the copy we just made, not the source).
  log { " Updating Info.plist" } if thinningContext.traits
  dstInfoPlistPath = dstPath + "Payload" + srcIPA.mainBundle.name + "Info.plist"
  infoPlist = LoadPlist(dstInfoPlistPath)
  nativeInfoPlist = CFPropertyList::native_types(infoPlist.value) unless infoPlist == nil
  # Raise the recorded MinimumOSVersion when every variant being produced targets a newer OS than the input app.
  inputDeploymentTarget = srcIPA.mainBundle.deploymentTarget
  deploymentTarget = inputDeploymentTarget
  if thinningContext.variants.nil_if_empty
    variantDeploymentTarget = thinningContext.variants.min { |a,b| a.traits.deploymentTarget <=> b.traits.deploymentTarget }.traits.deploymentTarget
    deploymentTarget = [inputDeploymentTarget, variantDeploymentTarget].max
    if deploymentTarget != inputDeploymentTarget
      components = deploymentTarget.to_s.split('.')
      if components.count < 2
        # NOTE(review): this pushes the Array [0] rather than the String "0"; it still produces e.g. "13.0"
        # only because Array#join flattens nested arrays. `components << "0"` would be clearer.
        components << [0]
      end
      # NOTE(review): a Symbol key is written here while native_types produced String keys; CFPropertyList
      # serializes both, so a pre-existing "MinimumOSVersion" String key would not be replaced — verify intent.
      nativeInfoPlist[:MinimumOSVersion] = components.join('.')
    end
  end
  # Older watchOS deployment targets restrict which Info.plist keys may be present, so skip the extra keys there.
  requiresWatchInfoPlistWhitelist = srcIPA.mainBundle.platform.identifier == "watchos" && deploymentTarget < $watchOSInfoPlistWhitelistEndVersion
  unless requiresWatchInfoPlistWhitelist
    # Record the App Store tools build, and (when thinning) the exact device models this IPA supports.
    nativeInfoPlist[:DTAppStoreToolsBuild] = OPTIONS.appStoreToolsBuildVersion.as(String)
    nativeInfoPlist[:UISupportedDevices] = thinningContext.variants.collect{|x|x.device.productType}.sort.uniq if thinningContext.variants.nil_if_empty
  end
  # Rewrite the destination Info.plist (as XML) with the modifications above.
  modifiedInfoPlist = CFPropertyList::List.new
  modifiedInfoPlist.value = CFPropertyList.guess(nativeInfoPlist)
  FS.rm_f(dstInfoPlistPath)
  modifiedInfoPlist.save(dstInfoPlistPath, CFPropertyList::List::FORMAT_XML)
  # Extract asset packs, if appropriate.
  if srcIPA.hasAssetPacks
    # See if we're supposed to extract them.
    if extractAssetPacks
      # We're extracting asset packs into a directory next to the product, with a "-assetpacks" suffix.
      assetPackOutputDir = dstPath.parent + (dstPath.basename.to_s + "-assetpacks")
      log { " Extracting #{thinningContext.traits ? "thinned" : "unmodified"} asset packs to #{assetPackOutputDir.to_s.shellescape}" }
      for assetPack in srcIPA.assetPacks
        log { " #{assetPack.name.to_s.shellescape}" }
        assetPack.copyToPath(assetPackOutputDir + assetPack.name, thinningContext, 2)
      end
      outputInfo.assetPackOutputDir = assetPackOutputDir
      # Emit the AssetPackManifest.plist file, if appropriate.
      if assetPackManifestURLPrefix
        # We will put the asset pack manifest plist into the asset pack output directory.
        assetPackManifestFilePath = assetPackOutputDir + "AssetPackManifest.plist";
        # Now create it from the asset pack directory.
        log { " Creating asset pack manifest plist at #{assetPackManifestFilePath.to_s.shellescape}" }
        CreateAssetPackManifest(assetPackOutputDir, assetPackManifestFilePath, assetPackManifestURLPrefix, srcIPA.payloadPath)
        # Make a note of where the asset pack manifest is.
        outputInfo.assetPackManifestPath = assetPackManifestFilePath
      else
        log { " (not creating an asset pack manifest plist, because no manifest url prefix was provided)" }
      end
    else
      # We're not extracting asset packs, but the original contained them, so we copy them over.
      assetPackOutputDir = dstPath + "Payload" + "OnDemandResources"
      log { " Copying #{thinningContext.traits ? "thinned" : "unmodified"} asset packs in #{assetPackOutputDir.to_s.shellescape}" }
      for assetPack in srcIPA.assetPacks
        log { " #{assetPack.name.to_s.shellescape}" }
        assetPack.copyToPath(assetPackOutputDir + assetPack.name, thinningContext, 2)
      end
    end
  end
  # Create the application placeholder bundle, if requested.
  if createPlaceholderBundle
    # The path of the directory containing the placeholder is next to the product, with a "-placeholder" suffix.
    placeholderOutputDir = dstPath.parent + (dstPath.basename.to_s + "-placeholder")
    log { " Creating placeholder app wrapper at #{placeholderOutputDir.to_s.shellescape}" }
    # We create a placeholder bundle inside the directory.
    placeholderAppPath = placeholderOutputDir + srcIPA.mainBundle.name
    log { " Creating main bundle directory #{placeholderAppPath.basename.to_s.shellescape}" }
    FS.mkdir_p(placeholderAppPath)
    # We copy only the Info.plist into the placeholder bundle.
    log { " Creating placeholder Info.plist" }
    infoPlist = LoadUserPlist(srcIPA.mainBundle.infoPropertyListPath, srcIPA.payloadPath)
    nativeInfoPlist = CFPropertyList::native_types(infoPlist.value) unless infoPlist == nil
    modifiedInfoPlist = CFPropertyList::List.new
    modifiedInfoPlist.value = CFPropertyList.guess(nativeInfoPlist)
    modifiedInfoPlist.save(placeholderAppPath + "Info.plist", CFPropertyList::List::FORMAT_XML)
    # Record the placeholder bundle's path in the output info descriptor.
    outputInfo.placeholderAppPath = placeholderAppPath
  end
  # watchKit2ContaineriOSApp is only set for Watch apps extracted from combo apps
  if srcIPA.mainBundle.watchKit2ContaineriOSApp && srcIPA.mainBundle.deploymentTarget < OPTIONS.watchOSVersionRemovedPlaceholders
    # (Note: we *do* want to check the source app deployment target, not the thinned variant's deployment target)
    makeiOSPlaceholderForWatch(srcIPA.mainBundle.watchKit2ContaineriOSApp, dstPath)
  end
  # Optionally verify the produced IPA against an untouched copy of the input.
  if OPTIONS.validateOutput
    ValidateOutputIPA(cleanSrcIPA, IPA.new(dstPath), OPTIONS)
  end
  EmitEventLogStopTrace(taskId, "Create Output IPA")
  return outputInfo
end
# We need to compile the bitcode once for each architecture that contains bitcode, and we need to do so in reverse | |
# dependency order. This is because a Mach-O can only be compiled when any dynamic library on which it depends has | |
# already been compiled (or existed as executable code in the first place). So we use a two-pass approach: first | |
# we descend down through the bundle hierarchy, building up lists of Mach-O images to compile. As we cross platform | |
# boundaries, we reevaluate whether to compile or strip bitcode. We compile if we have been asked to do so and if | |
# the bundle's main executable contains bitcode, otherwise we strip it. This decision is made for each architec- | |
# ture, so we end up with two sets: architectures to compile and architectures to strip. Those two sets should be | |
# disjoint. Any architecture that doesn't end up in either set should be omitted. Mach-O images that should be
# stripped can be stripped immediately, but those that should be compiled have to be added to a list and compiled | |
# later, as noted above (we have to compile them in the right order). | |
def CompileOrStripBitcodeInBundle(bundle, ipa, bitcodeContext, machoImagesToCompile = nil, machoFilesToReassemble = nil, level = 0)
  options = OPTIONS
  taskId = EmitEventLogStartTrace("#{options.compileBitcode ? "Compiling" : "Stripping"} Bitcode", { :bundle => bundle.path, :ipa => ipa.path } )
  # If we're at the top level or if we are crossing into a new platform (i.e. if we are looking at a "main bundle"
  # for the platform), we reset our idea of the sets of architectures for which to compile or to strip bitcode.
  if bundle.isBitcodeCompilationRootBundle
    # Start a new list of Mach-O images to compile. Note that this does not affect the lists that are already
    # being built up by callers.
    machoImagesToCompile = []
    # We also want to keep track of the Mach-O files that we will want to reassemble from compiled or stripped
    # pieces.
    machoFilesToReassemble = []
  end
  # Now go through the Mach-O files in the bundle, looking at each image in turn.
  log { " #{"| " * level}#{bundle.path.relative_path_from(ipa.payloadPath)} (identifier: #{bundle.identifier}, platform: #{bundle.platformIdentifier})" }
  for machoFile in bundle.machoFiles
    # Now go through the Mach-O images in the file, either compiling, stripping, or excluding as appropriate
    # based on its architecture. We replace the list of Mach-O images with a possibly smaller list.
    log { " #{"| " * level}#{machoFile.type.downcase} '#{machoFile.path.relative_path_from(ipa.payloadPath)}'..." }
    adjustedMachoImages = []
    for machoImage in machoFile.machoImages
      # Exclude this Mach-O image if its architecture isn't one of the ones we want.
      if machoImage.shouldExcludeFromOutput
        log { " #{"| " * level}.#{machoImage.arch} (exclude)" }
        next
      end
      # Otherwise, we first make sure there's a thin Mach-O image file (this might
      # use lipo, or might just copy (or hard-link) the file if we already have a thin slice).
      machoImageInputPath = machoImage.thinnedInputPath(ipa, options)
      FS.mkdir_p(machoImageInputPath.parent)
      machoImageOutputPath = machoImage.thinnedOutputPath(ipa, options)
      FS.mkdir_p(machoImageOutputPath.parent)
      shouldCompileBitcode = nil
      # Some Watch apps are missing bitcode in their payload Swift libs, but do have bitcode in the archive's SwiftSupport
      if options.compileBitcode && !machoImage.hasBitcode && bundle.isWatchKitStubApp && machoFile.platform.identifier == "watchos" && machoFile.isSwiftRuntimeDylib
        # Substitute the matching-architecture dylib from SwiftSupport, which is required to carry bitcode.
        replacementFile = ipa.path + "SwiftSupport" + "watchos" + machoFile.path.basename
        EmitFatalIPAPayloadValidationError("missing bitcode in #{machoFile.path.to_s.shellescape} : #{machoImage.arch} and no replacement found in SwiftSupport in #{replacementFile.to_s.shellescape}") unless File.exist?(replacementFile.to_s)
        replacementMachoFile = MakeFileSystemNode(replacementFile, nil, nil, nil).as(FSMachOFileNode, replacementFile.to_s)
        replacementMachoImage = replacementMachoFile.machoImages.detect { |i| i.arch == machoImage.arch }
        EmitFatalIPAPayloadValidationError("missing bitcode in both #{machoFile.path.to_s.shellescape} : #{machoImage.arch} and #{replacementMachoFile.path.to_s.shellescape} (missing arch)") unless replacementMachoImage
        EmitFatalIPAPayloadValidationError("missing bitcode in both #{machoFile.path.to_s.shellescape} : #{machoImage.arch} and #{replacementMachoFile.path.to_s.shellescape} : #{replacementMachoImage.arch}") unless replacementMachoImage.hasBitcode
        replacementMachoImage.thinToPath(machoImageInputPath, level)
        shouldCompileBitcode = true
      else
        machoImage.thinToPath(machoImageInputPath, level)
        shouldCompileBitcode = machoImage.shouldCompileBitcode(options)
      end
      # What we do next depends on whether or not the Mach-O has bitcode.
      if shouldCompileBitcode
        # Because we have to compile Mach-Os in dependency order, we defer the actual compilation.
        log { " #{"| " * level}.#{machoImage.arch} (compile)" }
        machoImagesToCompile << machoImage
      elsif machoImage.shouldStripBitcode(options)
        # But we can strip bitcode from the binary right away (no need to defer). We do so in-place.
        log { " #{"| " * level}.#{machoImage.arch} (strip)" }
        assert(!machoImageOutputPath.exist?, "Duplicate output: #{machoImageOutputPath.to_s}")
        CmdSpec.new(locate_tool("bitcode_strip", [machoImage.platform.toolsPath]), [ "-r", "-o", machoImageOutputPath, machoImageInputPath ]).run(level)
        # Strip Swift symbols from Swift dylibs
        if machoImage.machoFile.isSwiftRuntimeDylib && ipa.stripSwiftSymbols
          CmdSpec.new(locate_tool("strip", [machoImage.platform.toolsPath]), [ "-ST", machoImageOutputPath ]).run(level)
        end
      else
        # Otherwise, the Mach-O image doesn't have bitcode, so we strip swift symbols.
        # Strip Swift symbols from Swift dylibs
        if machoImage.machoFile.isSwiftRuntimeDylib && ipa.stripSwiftSymbols
          assert(!machoImageOutputPath.exist?, "Duplicate output: #{machoImageOutputPath.to_s}")
          CmdSpec.new(locate_tool("strip", [machoImage.platform.toolsPath]), [ "-ST", "-o", machoImageOutputPath, "-", machoImageInputPath ]).run(level)
          verb = "strip-swift-dylib"
        else
          assert(!machoImageOutputPath.exist?, "Duplicate output: #{machoImageOutputPath.to_s}")
          FS.cp(machoImageInputPath, machoImageOutputPath)
          verb = "copy"
        end
        log { " #{"| " * level}.#{machoImage.arch} (#{verb})" }
      end
      # Add the Mach-O image as one that will appear in the universal Mach-O.
      adjustedMachoImages << machoImage
    end # images
    # Set the list of adjusted Mach-O images as the Mach-O file's new list (excluding any unused images). (todo stop mutating inputs / macho)
    machoFile.machoImages = adjustedMachoImages
    # Also remember that we'll need to re-lipo this Mach-O file for the universal binary.
    machoFilesToReassemble << machoFile
  end # files
  # Recurse through any subbundles (including, possibly, those of other platforms). We pass down our context
  # and accumulator containers.
  for subbundle in bundle.nestedBundles
    CompileOrStripBitcodeInBundle(subbundle, ipa, bitcodeContext, machoImagesToCompile, machoFilesToReassemble, level + 1)
  end
  # If we're a main bundle, we do the postprocessing, now that we've dealt with all Mach-Os and all subbundles.
  # This includes compiling any bitcode files that need it (in library dependency order), and reassembling the
  # Mach-O files for use in the universal binary.
  if bundle.isBitcodeCompilationRootBundle
    # Now go through the list of Mach-O images we need to compile. We process them one architecture at a time.
    # We have already copied bitcode-less executables into a single per-architecture directory, so the bitcode
    # compiler will be able to find them.
    archsToCompile = if options.compileBitcode then bundle.bitcodeCompilationRootMachO.machoImages.select{|i| i.hasBitcode}.map{|i| i.arch} else [] end
    # Special case: an armv7k-only Watch payload may additionally be translated to arm64_32.
    if options.compileBitcode && options.translateWatchOS && archsToCompile == ["armv7k"] && bundle.platform.identifier == "watchos"
      archsToCompile << "arm64_32"
    end
    for arch in archsToCompile
      # Get the list of Mach-O images that match the current architecture. When translating to arm64_32,
      # there are no arm64_32 images yet, so fall back to the armv7k images as compilation inputs.
      log { " #{"| " * level}Compiling bitcode for #{arch}..." }
      remainingMachoImagesForArch = machoImagesToCompile.select{ |img| img.arch == arch }.nil_if_empty || machoImagesToCompile.select{ |img| img.arch == "armv7k" && arch == "arm64_32" }
      # Find the first Mach-O image that doesn't link against any Mach-O images that we still need to compile.
      while not remainingMachoImagesForArch.empty?
        # We still have Mach-O images to compile, so we need to find the next one whose dependencies have all
        # been resolved.
        namesOfRemainingMachOs = remainingMachoImagesForArch.select{|image| image.isDylib }.map{ |image| image.machoFile.name }
        machoImage = remainingMachoImagesForArch.select{ |image| (namesOfRemainingMachOs & image.dylibNames) == [] }.first
        # If we found a Mach-O image, we compile it. If not, it means we hit a dependency cycle.
        if machoImage
          # Compile the bitcode, requesting that a .dSYM also be created in the output directory. If the IPA has
          # a bitcode symbol deobfuscation map, we also pass it in now.
          machoName = machoImage.machoFile.name
          log { " #{"| " * level} Compiling #{arch} bitcode in '#{machoName.to_s.shellescape}'" }
          bitcodeSymbolMap = ipa.bitcodeSymbolMapsPath if ipa.hasBitcodeSymbolMaps
          platformIdent = machoImage.platformIdentifier
          platform = Platform.platformForIdentifer(platformIdent)
          FatalError(__LINE__, "failed to compile bitcode for #{machoName} because we couldn't find the platform with identifier '#{platformIdent}'", platformIdent) unless platform
          machoImageOutputPath = machoImage.thinnedOutputPath(ipa, options, arch)
          assert(!machoImageOutputPath.exist?, "Duplicate output: #{machoImageOutputPath.to_s}")
          FS.mkdir_p(machoImageOutputPath.parent)
          # Determine the name of the dSYM file we want. Initially we don't know its UUID, so we pass in a fixed name, and then we'll rename it once it's been created.
          dsymFile = Pathname(machoImageOutputPath.to_s + ".dSYM")
          assert(!dsymFile.exist?, "Duplicate output: #{dsymFile.to_s}")
          translating_watch = machoImage.arch == "armv7k" && arch == "arm64_32"
          # Invoke the bitcode-build-tool(1) tool to compile and link the bitcode in the thin Mach-O file.
          # (The tool is a Python script, hence the python3 launcher below.)
          begin
            cmdln = [locate_tool("bitcode-build-tool")]
            cmdln += [ "-v" ]
            cmdln += [ "-t", options.toolchainDir + "bin" ]
            # Add -L search paths for every already-compiled dylib this image links against.
            cmdln += ipa.linkageGraph[machoImage].map{|img| img.thinnedOutputPath(ipa, options, arch).parent }.uniq.map{|p| ["-L", p] }.flatten
            cmdln += [ "--sdk", platform.sdkPath ]
            cmdln += [ "-o", machoImageOutputPath ]
            cmdln += [ "--generate-dsym", dsymFile ]
            cmdln += [ "--symbol-map", bitcodeSymbolMap ] if bitcodeSymbolMap
            cmdln += [ "-j", options.bitcodeCompilationJFactor ] if options.bitcodeCompilationJFactor and options.bitcodeCompilationJFactor > 0
            for opt in options.bitcodeOptions
              cmdln += [ opt ]
            end
            if ipa.stripSwiftSymbols
              cmdln += [ "--strip-swift-symbols" ]
            end
            if translating_watch
              cmdln += [ "--translate-watchos" ]
            end
            cmdln += [ machoImage.thinnedInputPath(ipa, options) ]
            CmdSpec.new(locate_tool("python3"), cmdln).run(level)
          end
          outputImages = GetMachOImagesFromOToolInfoForFile(machoImageOutputPath)
          assert(outputImages.count == 1, "Expected a single output after bitcode recompilation")
          outputImage = outputImages[0]
          outputUUID = outputImage.uuid
          log { "bitcode-build-tool built #{machoImageOutputPath} #{outputImage.arch}:#{outputUUID} from #{machoImage.arch}:#{machoImage.uuid} " }
          # Invoke the symbols(1) tool to create a symbol cache.
          begin
            assert(dsymFile.exist?)
            # We need this tmp dir to stay around until output IPAs are done
            symtmp = Pathname(Dir.mktmpdir("symcache-#{machoName}", OPTIONS.tmpDir))
            cmdln = []
            cmdln += [ "-failIfMissingDsym" ]
            cmdln += [ "-symbolsPackageDir", symtmp.to_s ]
            cmdln += [ machoImageOutputPath ]
            CmdSpec.new(locate_tool("symbols", [machoImage.platform.toolsPath]), cmdln).run(level)
            # We successfully created the symbol cache file(s). We don't get told their names, so we scan them all, and figure out which ones are new.
            newSymcacheFiles = symtmp.children
            assert(newSymcacheFiles.count == 1, "Expected a single new .symbols file")
            symcacheFile = newSymcacheFiles[0]
            assert(symcacheFile.extname == ".symbols")
            bitcodeContext.symbolsStore.addSymbols(outputUUID, symcacheFile)
          end
          # If bitcode-build-tool(1) created a .dSYM file, we need to look in it to determine its UUID. Then we rename to it have that UUID.
          if dsymFile
            dsymBinaryPath = dsymFile + "Contents" + "Resources" + "DWARF" + machoName
            dsymImages = GetMachOImagesFromOToolInfoForFile(dsymBinaryPath)
            assert(dsymImages.count == 1, "Expected a single slice dsym after bitcode recompilation")
            dsymImageForArch = dsymImages.detect{|i| i.arch == arch }
            assert(dsymImageForArch, "Couldn't find image for arch '#{arch}' in '#{dsymImages.map{|i|i.arch}}'")
            uuid = dsymImageForArch.uuid
            assert(uuid == outputUUID)
            if uuid.nil_if_empty.nil?
              FatalError(__LINE__, "failed to determine UUID of dSYM file #{dsymFile.to_s.shellescape}")
            else
              # We were able to figure out the path of the dSYM, so we can rename the output file to it. We annotate it with the architecture.
              newFileName = (uuid + ".dSYM")
              log { " #{"| " * level} Renaming '#{dsymFile}' -> '#{newFileName}')" }
              newDSYMFile = dsymFile.parent + newFileName
              assert(!newDSYMFile.exist?, "Duplicate output: #{newDSYMFile.to_s}")
              FS.mv(dsymFile, newDSYMFile)
              dsymFile = newDSYMFile
              bitcodeContext.symbolsStore.addDSYM(outputUUID, dsymFile)
            end
          end
          # Dequeue the Mach-O image we compiled, so we can move on to the next one.
          remainingMachoImagesForArch.delete(machoImage)
        else
          # We didn't find a Mach-O image, so we have a cycle.
          # NOTE(review): this local `log` shadows the file's log method for the rest of this scope; harmless
          # here only because FatalError terminates immediately after — worth renaming if this code evolves.
          log = remainingMachoImagesForArch.map{|image| "#{image.machoFile.name}: #{(image.dylibNames & namesOfRemainingMachOs).join(', ')}" }.join("\n")
          FatalError(__LINE__, "failed to compile bitcode: found a cycle involving Mach-Os:\n#{log.indent}")
        end
      end
    end
    # Finally, reassemble universal (multi-architecture) Mach-Os from the thin ones.
    for machoFile in machoFilesToReassemble
      # If we get here, we know that we have thin Mach-Os for every architecture (we exploded them into thin files).
      log { " #{"| " * level}Reassembling #{machoFile.path.relative_path_from(ipa.payloadPath)} [#{machoFile.machoImages.map{ |img| img.arch }.join(", ")}]" }
      univMachoFilePath = machoFile.universalReassemblyPath(ipa, options)
      assert(!univMachoFilePath.exist?, "Duplicate output: #{univMachoFilePath.to_s}")
      FS.mkdir_p(univMachoFilePath.parent)
      # :: [[arch, path]]
      inputs = machoFile.machoImages.map { |img| [img.arch, img.thinnedOutputPath(ipa, options)] }
      # armv7k -> arm64_32 Watch translation: WatchKit stub binaries are replaced from the SDK (which ships
      # both architectures); other binaries gain the translated arm64_32 slice produced earlier.
      if options.compileBitcode && options.translateWatchOS && machoFile.enclosingBundle.platform.identifier == "watchos" && inputs.count == 1 && inputs[0][0] == "armv7k"
        if machoFile.enclosingBundle.isWatchKitStubApp && (machoFile.enclosingBundle.mainMachoFile == machoFile || machoFile.path.relative_path_from(machoFile.enclosingBundle.path) == Pathname("_WatchKitStub/WK"))
          sdkStubPath = machoFile.enclosingBundle.platform.sdkPath + "Library/Application Support/WatchKit/WK"
          archs = ["armv7k", "arm64_32"]
          stubImgs = (GetMachOImagesFromOToolInfoForFile(sdkStubPath) || []).select { |i| archs.include?(i.arch) }
          raise "Couldn't find #{archs} in #{sdkStubPath.to_s}" if stubImgs.count != archs.count
          # this has the side-effect of updating the machoFile parent pointer on each image, and we need that to call thinToPath
          FSMachOFileNode.new(sdkStubPath, nil, stubImgs)
          inputs = stubImgs.collect {|stubImg|
            thinnedStubPath = options.tmpDir + "WK_AST_stub_#{stubImg.arch}"
            # we're going to come through here twice: once for the main executable stub and once for the _WatchKitStub stub
            unless thinnedStubPath.exist?
              stubImg.thinToPath(thinnedStubPath, level+1)
            end
            [stubImg.arch, thinnedStubPath]
          }
        else
          inputs << ["arm64_32", machoFile.machoImages[0].thinnedOutputPath(ipa, options, "arm64_32")]
        end
      end
      if inputs.count == 1
        # We have only a single Mach-O image, so we just copy the thin file instead of creating a universal Mach-O with a single image.
        path = inputs[0][1]
        FS.cp(path, univMachoFilePath)
        log { " #{"| " * level} [cp #{path.to_s.shellescape} #{univMachoFilePath.to_s.shellescape}]" }
      else
        # We have more than one Mach-O image, so we use 'lipo' to put them back together.
        cmdln = [ "-create" ]
        for arch, path in inputs
          cmdln += [ "-arch", arch, path ]
        end
        cmdln += [ "-output", univMachoFilePath ]
        CmdSpec.new(locate_tool("lipo", [machoFile.platform.toolsPath]), cmdln).run(level, true)
      end
      bitcodeContext.addReassembledMachO(machoFile.path, univMachoFilePath)
    end
  end # isBitcodeCompilationRootBundle
  EmitEventLogStopTrace(taskId, "#{options.compileBitcode ? "Compiling" : "Stripping"} Bitcode")
end
# If `ipa` contains an embedded Watch app, create a new IPA which contains a top-level Watch app. | |
# If `ipa` contains an embedded Watch app, wrap it as an EmbeddedWatchIPA rooted
# at the Watch app's path and return it; return nil when there is no embedded
# Watch app. `options` is accepted for call-site symmetry but not consulted here.
def ExtractWatchIPA(ipa, options)
  embedded_watch_app = ipa.mainBundle.watchAppChild
  return nil if embedded_watch_app.nil?
  trace_id = EmitEventLogStartTrace("Extract Watch IPA", { :ipa => ipa.path })
  log { "Extracted embedded Watch IPA: #{embedded_watch_app.path}" }
  extracted_ipa = EmbeddedWatchIPA.new(ipa.path, embedded_watch_app.path)
  EmitEventLogStopTrace(trace_id, "Extract Watch IPA")
  extracted_ipa
end
# Main function for processing an unpacked IPA with a set of options. | |
# The first parameter is a directory containing the unpacked IPA contents, the second parameter is an options OpenStruct. | |
def ProcessIPA(ipaDir, options) | |
taskId = EmitEventLogStartTrace("Processing IPA", extraInfo: { :path => ipaDir }) | |
# Create a model object to represent the IPA. This will scan the file system hierarchy and return an object tree. | |
log { "Scanning IPA..." } | |
root_ipa = IPA.new(ipaDir) | |
root_is_watch_only_container = root_ipa.mainBundle && plistBoolValue(root_ipa.mainBundle.infoDict["ITSWatchOnlyContainer"] || false) | |
watch_ipa = ExtractWatchIPA(root_ipa, options) | |
if watch_ipa | |
JsonOutput[:extractedWatchIPAInputPath] = watch_ipa.path.to_s | |
if root_is_watch_only_container | |
root_ipa = watch_ipa | |
root_is_watch_only_container = false | |
watch_ipa = nil | |
else | |
watch_ipa.mainBundle.watchKit2ContaineriOSApp = root_ipa.mainBundle | |
end | |
end | |
root_ipas = [root_ipa, watch_ipa].compact | |
log { "Root IPAs:\n#{root_ipas.collect { |ipa| ipa.path.to_s }}" } | |
bitcodeContext = BitcodeContext.new | |
for ipa in root_ipas | |
thinDevices = options.thinDevices | |
# Create a new IPA from source, because srcIPA will be mutated (todo stop mutating srcIPA) | |
cleanSrcIPA = if OPTIONS.validateOutput then ipa.class.new(ipa.path, ipa.mainBundle.path) else nil end | |
# Check the input. We always do this, since it's not very expensive and it makes many subsequent errors much clearer. | |
ValidateIPA(ipa) | |
if $EMITTED_ERRORS | |
exit(1) | |
end | |
# Get a hold of the main bundle. | |
mainBundle = ipa.mainBundle | |
# If we weren't able to create a main bundle, we cannot proceed. | |
if mainBundle == nil | |
FatalError(__LINE__, "could not find main bundle in IPA “#{OPTIONS.ipaName}”") | |
end | |
unless mainBundle.kind_of?(FSBundleDirectoryNode) | |
FatalError(__LINE__, "main bundle in IPA “#{OPTIONS.ipaName}” doesn't seem to be well-formed") | |
end | |
# Check that the processing options make sense for the input IPA. Otherwise we warn about it and disable asset packs. | |
if options.createAssetPacks and not ipa.hasAssetPacks | |
# We've been asked to create on-demand asset packs but the IPA doesn't contain any. | |
EmitWarning(__LINE__, "Asset pack creation requested but IPA doesn't contain any ODR assets; ignoring the request") | |
options.createAssetPacks = false | |
end | |
# Deal with bitcode. If we've been asked to compile bitcode, we do so. If we haven't been asked to compile bitcode, | |
# but we have been asked to do thinning, we strip bitcode. We also strip bitcode (instead of compiling it) if we've | |
# been asked to compile bitcode but the main exectuable doesn't contain bitcode (this choice is made separately for | |
# every platform). | |
if root_ipa == ipa && (options.compileBitcode || !thinDevices.empty?) | |
# Compile and/or strip any bitcode in the bundle. This doesn't modify the input; any compiled or stripped binaries | |
# are written to the temporary directory, and individual nodes in the node hierarchy are made to point to it. | |
log { "#{options.compileBitcode ? "Compiling" : "Stripping"} bitcode..." } | |
CompileOrStripBitcodeInBundle(mainBundle, ipa, bitcodeContext) | |
# Explicitly ignore ipa.vpnPlugins. vpnPlugins are signed and we cannot break their signature. | |
end | |
# Create thinned-out device-specific IPAs, if requested. | |
if !thinDevices.empty? | |
# First we need to determine the relevant platform; it is the platform of the main bundle. Embedded bundles may | |
# have different platforms (e.g. a WatchKit app embedded inside an iPhoneOS app) but as it is not the main bundle, | |
# it will not be thinned. This is because the type of device on which the main bundle is installed is known at | |
# installation time, allowing it to be thinned, the embedded bundle could later be transferred to any supported | |
# device (e.g. any type of Apple Watch), so we cannot thin it. | |
# Determine the sets of device traits that actually matter for this payload. For example, if a particular | |
# device prefers armv7s and another device prefers armv7, but the payload only has armv7, then the thinned | |
# payload will be the same for both, and can be used for both devices (at least as far as architectures go). | |
# Same thing for the other traits. | |
traitsToVariants = makeThinningTraitsToThinningVariantsMap(mainBundle, thinDevices, OPTIONS.skipThinDevices, ipa) | |
if options.validateOutputZeroVariants && traitsToVariants.empty? && root_ipas.count == 1 | |
EmitIPAOutputValidationError("None of the specified thinning devices match the payload") | |
end | |
# Now iterate over the chosen traits sets and create thinned IPAs. For each trait set, we also know the list of | |
# model identifiers to which that thinned version applies. | |
for traits, thinningVariants in traitsToVariants | |
# Create a device-specific, thinned-out copy of the IPA for the set of traits. | |
# We start by determining the name to use for the thin IPA. We have been requested to obfuscate the name so that | |
# people don't try to make assumptions about what they see in it. The path is specified in the output JSON file, | |
# so there should never be a reason for a caller to make assumptions about the specific name. | |
traitsName = Digest::SHA1.hexdigest(traits.to_s) | |
# Call our main workhorse method to do the heavy lifting. | |
dstPath = options.outputPath + traitsName | |
thinningContext = ThinningContext.new(traits, traits, thinningVariants, dstPath, bitcodeContext, {}) | |
outputInfo = CreateOutputIPA(ipa, cleanSrcIPA, dstPath, thinningContext, options.createAssetPacks, options.urlPrefix, options.createAppPlaceholders) | |
# Make a note about the thinned IPA we created. | |
JsonOutput[:thinnedIPAs] ||= [] | |
JsonOutput[:thinnedIPAs] << { | |
path: outputInfo.path, | |
containsAssetPacks: (true if ipa.hasAssetPacks and not options.createAssetPacks), | |
placeholderAppPath: outputInfo.placeholderAppPath, | |
devices: thinningVariants.collect{|x|x.device.productType}.sort.uniq, | |
installTargets: thinningVariants.collect{|x| | |
{ | |
deviceModel: x.device.productType, | |
operatingSystemVersion: x.traits.deploymentTarget.to_s | |
} | |
}.uniq, | |
sourceToDestinationMap: thinningContext.inputOutputMap, | |
traits: traits.to_dict, | |
thinnableAssetCatalogs: ipa.thinnableAssetCatalogs.collect { |ac| ac.path.to_s }.sort, | |
}.reject { |k, v| v.nil? } | |
if outputInfo.assetPackOutputDir | |
# Make a note about the thinned asset packs we created. | |
JsonOutput[:thinnedAssetPackSets] = [] unless JsonOutput[:thinnedAssetPackSets] | |
JsonOutput[:thinnedAssetPackSets] << { | |
path: outputInfo.assetPackOutputDir, | |
manifest: outputInfo.assetPackManifestPath, | |
devices: thinningVariants.collect{|x|x.device.productType}.sort.uniq, | |
installTargets: thinningVariants.collect{|x| | |
{ | |
deviceModel: x.device.productType, | |
operatingSystemVersion: x.traits.deploymentTarget.to_s | |
} | |
}.uniq, | |
traits: traits.to_dict, | |
}.reject { |k, v| v.nil? } | |
end | |
log { "Created IPA at: #{outputInfo.path.to_s.shellescape}" } | |
log { " and ODRs at: #{outputInfo.path.parent + (outputInfo.path.basename.to_s + "-assetpacks")}" } if options.createAssetPacks | |
log { " and PLCs at: #{outputInfo.path.parent + (outputInfo.path.basename.to_s + "-placeholder")}" } if options.createAppPlaceholders | |
log { " for deployment variants: #{thinningVariants.collect{|x|x.to_s}.sort.join(", ")}" } | |
end | |
end | |
# Unless we're just being asked for information or validating, we should also create the universal IPA. | |
if thinDevices.count > 0 || OPTIONS.compileBitcode | |
# Create a device-generic, universal copy of the IPA (though we might have compiled bitcode, and might or might not | |
# be extracting asset packs and creating placeholder appwrappers, etc... all we know is that we're not thinning). | |
# Call our main workhorse method to do the heavy lifting. | |
dstPath = options.outputPath + "universal-#{ipa.mainBundle.platform.identifier}" | |
outputInfo = CreateOutputIPA(ipa, cleanSrcIPA, dstPath, ThinningContext.new(nil, nil, nil, dstPath, bitcodeContext, nil), options.createAssetPacks, options.urlPrefix, options.createAppPlaceholders) | |
# Make a note about the universal IPA we created. (Legacy, see universalIPAs below.) | |
if root_ipa == ipa | |
JsonOutput[:universalIPA] = { | |
path: outputInfo.path, | |
platform: ipa.mainBundle.platform.identifier, | |
containsAssetPacks: (true if ipa.hasAssetPacks and not options.createAssetPacks), | |
placeholderAppPath: outputInfo.placeholderAppPath, | |
}.reject { |k, v| v.nil? } | |
end | |
JsonOutput[:universalIPAs] ||= [] | |
JsonOutput[:universalIPAs] << { | |
path: outputInfo.path, | |
platform: ipa.mainBundle.platform.identifier, | |
containsAssetPacks: (true if ipa.hasAssetPacks and not options.createAssetPacks), | |
placeholderAppPath: outputInfo.placeholderAppPath, | |
}.reject { |k, v| v.nil? } | |
if outputInfo.assetPackOutputDir && ipa == root_ipa | |
# Make a note about the asset packs we created. | |
JsonOutput[:universalAssetPackSet] = { | |
path: outputInfo.assetPackOutputDir, | |
platform: ipa.mainBundle.platform.identifier, | |
manifest: outputInfo.assetPackManifestPath, | |
}.reject { |k, v| v.nil? } | |
end | |
end | |
# Emit information about the payload, if requested. | |
if ipa == root_ipa && OPTIONS.printInfo | |
# Recursively build a JSON-friendly dictionary describing `bundle`: its
# path/identifier/platform, bitcode-compilation root info, one entry per
# Mach-O file (with one entry per slice), and — under :subbundles — the
# same structure for every nested bundle.
def NestedPayloadBundleInfoForJSON(bundle)
  # Construct and return a dictionary of bundle information, including for any nested bundles.
  info = {
    path: bundle.path,
    identifier: bundle.identifier,
    platform: bundle.platformIdentifier,
    isBitcodeCompilationRootBundle: bundle.isBitcodeCompilationRootBundle,
    bitcodeCompilationRootMachO: bundle.bitcodeCompilationRootMachO.path,
    machoFiles: bundle.machoFiles.collect do |mf|
      # Construct a dictionary for the Mach-O file.
      { path: mf.path,
        slices: mf.machoImages.collect do |img|
          # Construct a dictionary for the Mach-O slice.
          { type: img.type,
            platform: img.platformIdentifier,
            arch: img.arch,
            uuid: img.uuid,
            dylibNames: img.dylibNames,
            hasExecCode: img.hasExecCode,
            hasBitcode: img.hasBitcode,
            isSigned: img.isSigned,
          }
        end
      }
    end,
  }
  # Recurse into nested bundles; :subbundles is only present when there are any.
  for subbundle in bundle.nestedBundles
    (info[:subbundles] ||= []) << NestedPayloadBundleInfoForJSON(subbundle)
  end
  return info
end
# Output the new-style, nested information. | |
mainBundleInfo = NestedPayloadBundleInfoForJSON(mainBundle) || [] | |
# Add asset pack information for the main bundle (not submodules). | |
mainBundleInfo[:assetPacks] = ipa.assetPacks.collect { |assetPack| assetPack.name } if ipa.assetPacks | |
JsonOutput[:nestedPayloadBundleInfo] = mainBundleInfo | |
end | |
if OPTIONS.printInfo | |
# List supported variants | |
JsonOutput[:allSupportedThinningVariants] ||= [] | |
allThinningTraitsToVariants = makeThinningTraitsToThinningVariantsMap(mainBundle, [ThinningVariantCLISpec.fromArg("all")], [], ipa) | |
data = allThinningTraitsToVariants.collect {|_,vs| vs.collect { |v| v.to_dict }} | |
JsonOutput[:allSupportedThinningVariants].concat(data) | |
end | |
if $EMITTED_ERRORS | |
exit(1) | |
end | |
end | |
EmitEventLogStopTrace(taskId, "Processing IPA") | |
end | |
# The full set of device traits that drive thinning for one deployment
# variant: deployment target, preferred CPU architecture, artwork-selection
# traits (idiom, scale, subtype, gamut, dynamic display mode),
# performance/graphics classes, and several capability flags.
# Instances behave like values: equality, ordering and hashing are all
# delegated to the stringified dictionary form produced by #to_dict.
class ThinningTraits
  # :: Gem::Version (min OS version)
  attr_accessor :deploymentTarget
  # :: Bool
  attr_accessor :passDeploymentTargetToAssetutil
  attr_accessor :preferredArch
  attr_accessor :artworkDevIdiom
  attr_accessor :artworkHostedIdioms
  attr_accessor :artworkScaleFactor
  attr_accessor :artworkDevSubtype
  attr_accessor :artworkDisplayGamut
  attr_accessor :artworkDynamicDisplayMode
  attr_accessor :devPerfMemoryClass
  attr_accessor :gfxFeatureSetClass
  attr_accessor :gfxFeatureSetFallbacks
  attr_accessor :featuresToRemove
  attr_accessor :supportsEmbeddedWatchApp
  attr_accessor :supportsEmbeddedUniversalWatchApp
  attr_accessor :hasSwiftRuntime
  attr_accessor :supportsEncryptionFormat2
  attr_accessor :coalescingGroup
  # All traits are supplied positionally. Only the deployment target's type
  # is enforced, because downstream code relies on Gem::Version comparisons.
  def initialize(deploymentTarget, passDeploymentTargetToAssetutil, preferredArch, artworkDevIdiom, artworkHostedIdioms, artworkScaleFactor, artworkDevSubtype, artworkDisplayGamut, artworkDynamicDisplayMode, devPerfMemoryClass, gfxFeatureSetClass, gfxFeatureSetFallbacks, featuresToRemove, supportsEmbeddedWatchApp, supportsEmbeddedUniversalWatchApp, hasSwiftRuntime, supportsEncryptionFormat2, coalescingGroup)
    deploymentTarget.assert_kind_of(Gem::Version)
    @deploymentTarget = deploymentTarget
    @passDeploymentTargetToAssetutil = passDeploymentTargetToAssetutil
    @preferredArch = preferredArch
    @artworkDevIdiom = artworkDevIdiom
    @artworkHostedIdioms = artworkHostedIdioms
    @artworkScaleFactor = artworkScaleFactor
    @artworkDevSubtype = artworkDevSubtype
    @artworkDisplayGamut = artworkDisplayGamut
    @artworkDynamicDisplayMode = artworkDynamicDisplayMode
    @devPerfMemoryClass = devPerfMemoryClass
    @gfxFeatureSetClass = gfxFeatureSetClass
    @gfxFeatureSetFallbacks = gfxFeatureSetFallbacks
    @featuresToRemove = featuresToRemove
    @supportsEmbeddedWatchApp = supportsEmbeddedWatchApp
    @supportsEmbeddedUniversalWatchApp = supportsEmbeddedUniversalWatchApp
    @hasSwiftRuntime = hasSwiftRuntime
    @supportsEncryptionFormat2 = supportsEncryptionFormat2
    @coalescingGroup = coalescingGroup
  end
  # Sentinel value for the dynamic display mode that must be expanded into two
  # concrete (scale, gamut) trait sets when talking to assetutil — see
  # to_assetutil_args_array and to_assetutil_T_string.
  def artworkDynamicDisplayMode_1080pSDR
    return "1080pSDR"
  end
  def preferredArchitectureAmongCandidates(availableArchs)
    # Given an array of available architectures, this method returns the one that's preferred. If none are supported, we return nil.
    return CPUArchitecture.get(preferredArch).runnable_architectures.find{|a| availableArchs.include?(a) }
  end
  # The artwork idioms this device can host, sorted; a "pad" device also hosts
  # "phone" artwork.
  def supportedIdioms
    return ([self.artworkDevIdiom] + (if self.artworkDevIdiom == "pad" then ["phone"] else [] end)).sort
  end
  # Value semantics: equality, ordering and hashing all route through #to_s
  # (and therefore #to_dict), so every non-nil trait participates.
  def ==(other)
    self.to_s == other.to_s
  end
  def <=>(other)
    to_s <=> other.to_s
  end
  def eql?(other)
    return self == other
  end
  def hash
    return to_s.hash
  end
  def to_s
    return to_dict.to_s
  end
  # Canonical dictionary form with nil-valued traits omitted. Used for
  # equality (via #to_s), logging and JSON output.
  def to_dict
    return {
      deploymentTarget: deploymentTarget,
      passDeploymentTargetToAssetutil: passDeploymentTargetToAssetutil,
      architecture: preferredArch,
      artworkDevIdiom: artworkDevIdiom,
      artworkHostedIdioms: artworkHostedIdioms,
      artworkScaleFactor: artworkScaleFactor,
      artworkDevSubtype: artworkDevSubtype,
      artworkDisplayGamut: artworkDisplayGamut,
      artworkDynamicDisplayMode: artworkDynamicDisplayMode,
      devPerfMemoryClass: devPerfMemoryClass,
      gfxFeatureSetClass: gfxFeatureSetClass,
      gfxFeatureSetFallbacks: gfxFeatureSetFallbacks,
      featuresToRemove: featuresToRemove,
      supportsEmbeddedWatchApp: supportsEmbeddedWatchApp,
      supportsEmbeddedUniversalWatchApp: supportsEmbeddedUniversalWatchApp,
      hasSwiftRuntime: hasSwiftRuntime,
      supportsEncryptionFormat2: supportsEncryptionFormat2,
      coalescingGroup: coalescingGroup,
    }.reject { |k, v| v.nil? }
  end
  # [(assetutilFlagName: String, traitsPropertyName: Symbol, traitsPropertyValueTransformer: Any -> String)]
  #
  # How do you translate a device traits object for use with assetutil? You need to know which traits correspond to which assetutil flags and how to convert their values to assetutil's representation. This specifies that with a list of triples: the assetutil flag name, the corresponding ThinningTraits property name, and the function needed to convert the property value into a string to be used as an assetutil argument.
  def assetutil_translation_map
    id = Proc.new { |value| value.to_s }
    join_colon = Proc.new { |value| value.join(':') }
    join_comma = Proc.new { |value| value.join(',') }
    # The deployment target is only passed through to assetutil for variants
    # that opted in (see passDeploymentTargetToAssetutil).
    deploymentTargetTranslation = []
    if passDeploymentTargetToAssetutil
      deploymentTargetTranslation = [["deployment-target", :deploymentTarget, id]]
    end
    return [
      ["scale", :artworkScaleFactor, id],
      ["idiom", :artworkDevIdiom, id],
      ["subtype", :artworkDevSubtype, id],
      ["display-gamut", :artworkDisplayGamut, id],
      ["memory", :devPerfMemoryClass, id],
      ["graphicsclass", :gfxFeatureSetClass, id],
      ["graphicsclassfallbacks", :gfxFeatureSetFallbacks, join_colon],
      ["hostedidioms", :artworkHostedIdioms, join_comma],
    ] + deploymentTargetTranslation
  end
  # UIDeviceFamily #uidf may be a [String], [Int], or comma-separated String.
  # Normalizes it into a sorted, de-duplicated list of assetutil idiom names
  # ("1" -> phone, "2" -> pad, "3" -> tv, "4" -> watch); raises on anything else.
  def self.assetutil_idioms_for_UIDeviceFamily(uidf)
    uidf = [uidf].flatten.join(',').split(',').map{|x| x.to_s.strip.nil_if_empty }.compact.sort.uniq
    return uidf.map{|f|
      case f
      when "1"
        "phone"
      when "2"
        "pad"
      when "3"
        "tv"
      when "4"
        "watch"
      else
        raise "Unknown UIDeviceFamily: '#{f}' from '#{uidf}'"
      end
    }
  end
  # Return a copy of self where traits which affect assetutil have been taken from `other`
  def take_assetutil_traits(other)
    return ThinningTraits.new(other.deploymentTarget, other.passDeploymentTargetToAssetutil, self.preferredArch, other.artworkDevIdiom, other.artworkHostedIdioms, other.artworkScaleFactor, other.artworkDevSubtype, other.artworkDisplayGamut, other.artworkDynamicDisplayMode, other.devPerfMemoryClass, other.gfxFeatureSetClass, other.gfxFeatureSetFallbacks, self.featuresToRemove, self.supportsEmbeddedWatchApp, self.supportsEmbeddedUniversalWatchApp, self.hasSwiftRuntime, self.supportsEncryptionFormat2, self.coalescingGroup).freeze
  end
  # { assetutilFlag: String => assetutilValue: String }
  # Nil-valued and empty-valued traits are dropped.
  def to_assetutil_dict
    return Hash.from_tuples(assetutil_translation_map.map{|flag,sym,transformer|
      raw_value = self.send(sym)
      next unless raw_value
      t_value = transformer.call(raw_value)
      next unless t_value.nil_if_empty
      [flag, t_value]
    }.compact)
  end
  # Array representation appropriate for assetutil parameters.
  # E.g. ["--idiom", "pad", "--scale", "1"]
  def to_assetutil_args_array
    if self.artworkDynamicDisplayMode == self.artworkDynamicDisplayMode_1080pSDR
      # We need two sets of args, one with 'scale:1 gamut:srgb' and one with 'scale:2 gamut:p3'
      a = self.dup
      a.artworkDynamicDisplayMode = nil
      a.artworkScaleFactor = 1
      a.artworkDisplayGamut = 'sRGB'
      b = self.dup
      b.artworkDynamicDisplayMode = nil
      b.artworkScaleFactor = 2
      b.artworkDisplayGamut = 'P3'
      return a.to_assetutil_args_array + b.to_assetutil_args_array
    else
      return self.to_assetutil_dict.to_a.map{|flag, value| ["--" + flag, value] }.flatten
    end
  end
  # String representation appropriate for assetutil -T.
  # E.g.: scale=1:idiom=pad:subtype=0:display-gamut=P3:memory=3:graphicsClass=MTL1,2:deployment=2016
  def to_assetutil_T_string
    # - graphicsclassfallbacks should not be passed to -T
    skip = ["graphicsclassfallbacks"]
    renames = {
      "deployment-target" => "deployment"
    }
    if self.artworkDynamicDisplayMode == self.artworkDynamicDisplayMode_1080pSDR
      # We need two sets of args, one with 'scale:1 gamut:srgb' and one with 'scale:2 gamut:p3'
      a = self.dup
      a.artworkDynamicDisplayMode = nil
      a.artworkScaleFactor = 1
      a.artworkDisplayGamut = 'sRGB'
      b = self.dup
      b.artworkDynamicDisplayMode = nil
      b.artworkScaleFactor = 2
      b.artworkDisplayGamut = 'P3'
      return a.to_assetutil_T_string + ":" + b.to_assetutil_T_string
    else
      return self.to_assetutil_dict.delete_if{|k,v| skip.include?(k) }.to_a.map{|tuple|
        tuple[0] = renames[tuple[0]] || tuple[0]
        tuple.join('=')
      }.join(':')
    end
  end
end
# All the thinning information we need to pass through while thinning an app
# and its nested bundles.
class ThinningContext
  # traits         :: ThinningTraits? — traits for the bundle currently being processed
  # mainTraits     :: ThinningTraits? — traits of the top-level app bundle
  # variants       :: [ThinningVariant] — variants of the top-level app bundle
  # outputIPAPath  :: Pathname
  attr_reader :traits, :mainTraits, :variants, :outputIPAPath, :bitcodeContext, :inputOutputMap

  def initialize(traits, mainTraits, variants, outputIPAPath, bitcodeContext, inputOutputMap)
    @traits, @mainTraits, @variants = traits, mainTraits, variants
    @outputIPAPath, @bitcodeContext, @inputOutputMap = outputIPAPath, bitcodeContext, inputOutputMap
  end

  # A copy of this context with the current-bundle traits replaced, for use
  # when descending into a nested bundle. Everything else is shared.
  def subscope(traits)
    ThinningContext.new(traits, @mainTraits, @variants, @outputIPAPath, @bitcodeContext, @inputOutputMap)
  end

  # Record that input path `a` produced output path `b`. No-op when I/O
  # tracking is disabled (no map was supplied); duplicate inputs are a bug.
  def recordIO(a, b)
    map = inputOutputMap
    return if map.nil?
    assert(map[a].nil?, "Duplicate I/O #{a} - #{b}")
    map[a] = b
  end
end
# A concrete device model: hardware target (e.g. "m68ap"), product type
# (e.g. "iPhone1,1") and a human-readable display name.
class DeviceType
  attr :target       # e.g. m68ap
  attr :productType  # e.g. iPhone1,1
  attr :displayName  # e.g. iPhone 4s

  def initialize(target, productType, displayName)
    @target = target
    @productType = productType
    # Fall back to the product type when no friendly name is available.
    @displayName = displayName || productType
  end

  # Devices stringify — and therefore sort — by product type.
  def to_s
    productType
  end

  def <=>(other)
    to_s <=> other.to_s
  end

  # Dictionary form for logging/JSON output.
  def to_dict
    { target: target, productType: productType, displayName: displayName }
  end
end
# Pairs a concrete device with the trait set used to thin for it.
class ThinningVariant
  attr_accessor :device # :: DeviceType
  attr_accessor :traits # :: ThinningTraits

  def initialize(device, traits)
    # Both components are type-checked up front; downstream code assumes
    # these exact classes.
    device.assert_kind_of(DeviceType)
    traits.assert_kind_of(ThinningTraits)
    @device = device
    @traits = traits
  end

  def to_s
    to_dict.to_s
  end

  # Dictionary form for logging/JSON output.
  def to_dict
    { device: device.to_dict, traits: traits.to_dict }
  end
end
# Given a bundle and the traits a device would ideally like, compute the best
# ThinningTraits the bundle can actually satisfy.
#
# Returns a two-element result: [traits, nil] on success (a frozen, possibly
# "downshifted" copy of the requested traits), or [nil, message] when the
# bundle cannot support the device at all.
def bestTraitsForBundle(bundle, traits, device)
  bundle.assert_kind_of(FSBundleDirectoryNode)
  traits.assert_kind_of(ThinningTraits)
  device.assert_kind_of(DeviceType)
  # Given a bundle whose supported device traits might not match what we'd prefer, we construct and return a device
  # traits object that represents as good of a match as possible. If we cannot find one (e.g. if this is an armv7
  # device and the bundle only has arm64 code), we return nil.
  supportedArchitectures = bundle.supportedArchitectures
  if bundle.isWatchKitStubApp
    # A WatchKit stub app's architectures come from the embedded watch app
    # extension, not the stub itself.
    supportedArchitectures = bundle.watchAppExChild.supportedArchitectures
    executable = bundle.watchAppExChild.mainMachoFile
    hasBitcode = executable.machoImages.any? {|i| i.hasBitcode}
    if OPTIONS.translateWatchOS && OPTIONS.compileBitcode && hasBitcode
      # With bitcode available we can translate to arm64_32 even though the
      # binary doesn't ship that slice.
      supportedArchitectures << "arm64_32"
      supportedArchitectures.uniq!
    end
  end
  # Note that we also imbue the returned device traits with the features-to-remove for this device (if any).
  return nil,"the bundle doesn't contain any architectures in common among its executables" if supportedArchitectures.empty?
  # Check the architecture.
  bestArch = traits.preferredArchitectureAmongCandidates(supportedArchitectures)
  return nil,"the device doesn't support any of the available architectures (#{supportedArchitectures.join(", ")})" unless bestArch
  # Check the artwork idiom.
  return nil,"the device doesn't support the app's UIDeviceFamily" unless (traits.supportedIdioms & bundle.supportedIdioms).size > 0
  bestArtworkDisplayGamut = traits.artworkDisplayGamut
  # Special case for the 9.7" iPad Pro models: when the app deploys below
  # 10.0, keep "all" gamuts rather than the device's preferred one.
  if ["iPad6,3", "iPad6,4"].include?(device.productType)
    if bundle.deploymentTarget && bundle.deploymentTarget < Gem::Version.new("10.0")
      bestArtworkDisplayGamut = "all"
    end
  end
  # If the bundle has no embedded Watch app, we can collapse this trait
  supportsEmbeddedWatchApp = traits.supportsEmbeddedWatchApp && bundle.watchAppChild != nil
  # If there's no watch content, we can collapse this trait
  supportsEmbeddedUniversalWatchApp = traits.supportsEmbeddedUniversalWatchApp && bundle.nestedBundles.any? { |b| b.isWatchKitStubApp || b.isWatchKitAppExtension }
  # If there's no Swift runtime lib content or this app was built before stable ABI, we can collapse this trait
  hasSwiftRuntime = traits.hasSwiftRuntime
  if hasSwiftRuntime
    bundleUsesStableSwiftABI = bundle.builtWithXcodeVersion >= OPTIONS.xcodeVersionWithStableSwiftABI
    bundleEmbedsSwiftRuntime = bundle.enumerateTree.any? { |node| node.kind_of?(FSMachOFileNode) && node.shouldThin && node.isSwiftRuntimeDylib }
    unless bundleUsesStableSwiftABI && bundleEmbedsSwiftRuntime
      hasSwiftRuntime = false
    end
  end
  # If we get this far, we create a (possibly downshifted) frozen copy of the
  # requested traits with the adjustments computed above, paired with a nil
  # problem message.
  return ThinningTraits.new(traits.deploymentTarget, traits.passDeploymentTargetToAssetutil, bestArch, traits.artworkDevIdiom, traits.artworkHostedIdioms, traits.artworkScaleFactor, traits.artworkDevSubtype, bestArtworkDisplayGamut, traits.artworkDynamicDisplayMode, traits.devPerfMemoryClass, traits.gfxFeatureSetClass, traits.gfxFeatureSetFallbacks, traits.featuresToRemove, supportsEmbeddedWatchApp, supportsEmbeddedUniversalWatchApp, hasSwiftRuntime, traits.supportsEncryptionFormat2, traits.coalescingGroup).freeze, nil
end
# :: [ThinningTraits: [ThinningVariant]]
#
# Build a map from coalesced device-trait sets to the deployment variants
# (device + traits pairs) that each set serves, for `bundle` inside `ipa`.
# - thinDevices / skipThinDevices :: [ThinningVariantCLISpec] — which variants
#   to include / exclude.
# Variants whose traits would yield an identical thinned payload end up
# grouped under a single traits key.
def makeThinningTraitsToThinningVariantsMap(bundle, thinDevices, skipThinDevices, ipa)
  # We ask the bundle to figure out the supported platform; we are unable to proceed if we can't determine it.
  platformIdent = bundle.platformIdentifier()
  # Fixed: this message previously interpolated an undefined local `path`
  # (NameError when hit); report the bundle's own path instead.
  FatalError(__LINE__, "Cannot determine the supported platform for bundle at #{bundle.path.to_s.shellescape}") unless platformIdent
  # Look up the corresponding platform object.
  platform = Platform.platformForIdentifer(platformIdent)
  FatalError(__LINE__, "Cannot find platform with identifier '#{platformIdent}'") unless platform
  # Determine the sets of device traits that actually matter for this bundle. For example, if a particular
  # device prefers armv7s and another device prefers armv7, but the bundle contains only armv7, then the thinned
  # payload will be the same for both, and can be used for both devices (at least as far as architectures go).
  # Same thing for the other traits.
  # Go through the device types and build a mapping from trait sets to device lists for the devices for which we
  # will create thinned variants.
  result = {}
  # Shared default array is safe here: entries are only ever (re)assigned via
  # `|=` / `+=` below, never mutated in place.
  result.default = []
  # First we check if the device is even selected by the input options; if it isn't, we skip to the next one.
  candidates = platform.thinningVariants.select { |v|
    ThinningVariantCLISpec.any_matches(thinDevices, v)
  }
  # For example: if we have variants for 9, 10, 11, 12, ... and the app is 10.2, we want to discard 9 (too low for deployment target) and 11 (because we can't vend specific OS variants to pre-iOS 12 devices). This is defined by OS_VARIANTS_PLATFORM_VERSION.
  perProductTypeCandidates = {}
  for c in candidates
    (perProductTypeCandidates[c.device.productType] ||= []) << c
  end
  candidates = []
  # NOTE(review): assumes bundle.deploymentTarget is non-nil here — TODO
  # confirm upstream always sets it before this is called.
  bundleTarget = bundle.deploymentTarget
  bundleTargetMajor = bundleTarget.segments[0]
  for _,cs in perProductTypeCandidates
    # Keep only variants whose major OS version is >= the app's.
    cs2 = cs.select { |c| c.traits.deploymentTarget.segments[0] >= bundleTargetMajor }
    # Of the variants at or below the app's deployment target, keep only the highest.
    topVariantLowerThanApp = cs2.select { |c| c.traits.deploymentTarget <= bundleTarget }.max { |a,b| a.traits.deploymentTarget <=> b.traits.deploymentTarget }
    if topVariantLowerThanApp
      # Remove variants below topVariantLowerThanApp
      cs2 = cs2.reject { |c| c.traits.deploymentTarget < topVariantLowerThanApp.traits.deploymentTarget }
    end
    lowestVariant = cs2.min { |a,b| a.traits.deploymentTarget <=> b.traits.deploymentTarget }
    if lowestVariant
      # Remove variants between lowestVariant and osVariantsIntroducedInVersion
      cs2 = cs2.reject { |c| lowestVariant.traits.deploymentTarget < c.traits.deploymentTarget && c.traits.deploymentTarget < platform.osVariantsIntroducedInVersion }
    end
    candidates += cs2
  end
  # We need to check skipped variants after we've discarded unnecessary versions. Otherwise we might keep unnecessary versions because we've skipped what we would have normally selected.
  candidates = candidates.reject { |v|
    ThinningVariantCLISpec.any_matches(skipThinDevices, v)
  }
  for thinningVariant in candidates
    # Otherwise we know that we want to thin for the device, but we check if we need to modify the device traits
    # based on the bundle payload. There are two possibilities: either we use a (possibly modified) set of device
    # traits (possibly modified, in case the payload contains something supported but not necessarily preferred by
    # the device traits, e.g. it prefers armv64 but can tolerate armv7), or we get back no device traits at all,
    # in case the device type doesn't support the bundle at all (e.g. an iPhone1,1 cannot run an app that doesn't
    # contain armv6 code).
    bestTraits,problemMessage = bestTraitsForBundle(bundle, thinningVariant.traits, thinningVariant.device)
    # It's possible that we couldn't find any device trait set that the device wants and the bundle supports.
    unless bestTraits
      EmitWarning(__LINE__, "Thinning variant #{thinningVariant.to_s} isn't supported by the bundle at #{bundle.path.to_s.shellescape}: #{problemMessage || "unknown reason"}")
      next
    end
    # If we get this far, we can create a thinned-down version for this type of device. We add the trait set to
    # the list of ones we want to thin for, if we haven't already seen it, and we add the device to the list of
    # devices for which that trait set is the best fit.
    result[bestTraits] |= [ thinningVariant ]
  end
  # Debug helper: dump the traits -> variants grouping to the log.
  # (Defined anew on each call; harmless redefinition.)
  def log_result(result, title)
    log {
      str = ""
      str += "#{title}:\n"
      for k,vs in result
        str += "Traits key:\n".indent(1)
        str += "#{k}\n".indent(2)
        str += "\n"
        str += "Grouped variants:\n".indent(1)
        for v in vs
          str += "#{v}\n".indent(2)
        end
        str += "---\n"
      end
      str
    }
  end
  log_result(result, "makeThinningTraitsToThinningVariantsMap after bestTraitsForBundle")
  if OPTIONS.useAssetutilEql
    # Optionally coalesce trait sets that assetutil considers equivalent for
    # every thinnable asset catalog in the IPA.
    assetCatalogs = ipa.thinnableAssetCatalogs
    log { "Thinnable AssetCatalogs:" }
    log { assetCatalogs.map{|x|x.path.to_s}.join("\n").indent }
    log { "Testing assetutil equality" }
    assetTraitGroups = result.keys.sort.group_using { |a,b|
      # .all? is always true when assetCatalogs is empty
      assetCatalogs.all? {|ac|
        log { "Testing assetutil equality for #{a} == #{b} using #{ac}" }
        ac.eql_for_traits?(a, b, 1)
      }
    }
    log { "AssetTraitGroups:" }
    for g in assetTraitGroups
      log { g.map{|x|x.to_s}.join("\n").indent }
      next if g.size == 1
      # When coalescing equivalent traits, we want to keep whichever has the lowest deployment target
      t0 = g.min { |x,y| x.deploymentTarget <=> y.deploymentTarget }
      g.delete(t0)
      tail = g
      for t1 in tail
        variants = result[t1]
        result.delete(t1)
        # This operation coalesces assetutil traits and merges values with other matching device traits.
        t2 = t1.take_assetutil_traits(t0)
        result[t2] += variants
      end
    end
  end
  log_result(result, "makeThinningTraitsToThinningVariantsMap after AssetCatalog.eql_for_traits?")
  return result
end
# Lazily-populated cache backing Platform.allKnownPlatforms (identifier => Platform).
$allKnownPlatformsByIdentifier = nil
# Represents a platform, including its identifier, its path, its SDK path, etc. Right now there's just one platform (iPhone OS),
# since Mac OS X isn't supported. But if we ever support any other platform, we want to be able to do so by merging in additional
# directories into the Platforms directory and have it all work.
class Platform
  attr :identifier # canonical identifier, e.g. iphoneos
  attr :mainPath # path of .platform folder itself
  attr :sdkPath # path of the SDK to use for the platform
  attr :toolsPath # path of platform-specific command line tools
  attr :thinningVariants # :: [ThinningVariant]
  attr :deploymentTargetWithSwiftRuntime # :: Gem::Version
  # Returns a hash that maps platform identifier to Platform objects for all known platforms. Currently there's only iPhoneOS.
  # Scans OPTIONS.platformsDir once and caches the result in $allKnownPlatformsByIdentifier.
  def self.allKnownPlatforms()
    unless $allKnownPlatformsByIdentifier
      # Create a hash that we'll add entries to.
      $allKnownPlatformsByIdentifier = {}
      # Look through the Platforms directory and find subdirectories having a ".platform" suffix.
      for platformDir in OPTIONS.platformsDir.children.sort
        # Skip it if it isn't a platform directory.
        next unless platformDir.extname == ".platform"
        # We can't count on .plists in the platform itself, so we look for an SDK directory with the same name as the platform.
        sdksDir = platformDir + "Developer" + "SDKs"
        # Pick the highest-sorting non-simulator SDK; `rescue nil` swallows any
        # failure (e.g. missing SDKs directory) and treats it as "no SDK".
        sdkDir = sdksDir.children.sort.select{ |dir| dir.basename.to_s =~ /#{platformDir.basename(".platform").to_s}[0-9.]+\.sdk/ and not dir.basename.to_s =~ /[Ss]imulator/ }.sort.last rescue nil
        # Ignore platforms without an interesting SDK.
        unless sdkDir
          EmitWarning(__LINE__, "Configuration issue: platform #{platformDir.basename} doesn't have any non-simulator SDKs; ignoring it")
          next
        end
        # Try to load the 'SDKSettings.plist'. If we cannot find it, it isn't a valid SDK.
        sdk_settings_path = sdkDir + "SDKSettings.plist"
        if !File.exist?(sdk_settings_path)
          EmitWarning(__LINE__, "Configuration issue: platform #{platformDir.basename} doesn't have a '#{sdkDir.basename}' SDK with a SDKSettings.plist; ignoring it")
          next
        end
        sdkSettingsPlist = LoadPlist(sdk_settings_path)
        # Look for the 'CanonicalName' key.
        canonicalNameValue = sdkSettingsPlist.value.value["CanonicalName"] rescue nil
        unless canonicalNameValue
          EmitWarning(__LINE__, "Configuration issue: platform #{platformDir.basename} doesn't have an SDKSettings.plist with a 'CanonicalName' key at the top level")
          next
        end
        # Strip the version suffix, e.g. "iphoneos14.4" -> "iphoneos".
        platformIdent = canonicalNameValue.value.gsub(/[0-9.]*$/, "")
        # Ignore uninteresting platforms such as Mac OS X.
        next if platformIdent == "macosx"
        # Instantiate a platform and add it to the mapping.
        # NOTE(review): Platform.new never actually returns nil — the `return nil`
        # inside #initialize only aborts initialization, so a platform whose
        # traits DB fails to load is still registered (with nil thinningVariants).
        # Confirm this is intended.
        platform = Platform.new(platformIdent, platformDir, sdkDir)
        $allKnownPlatformsByIdentifier[platformIdent] = platform if platform
      end
    end
    return $allKnownPlatformsByIdentifier
  end
  ## E.g. LC_VERSION_MIN_IPHONEOS -> iphoneos
  # tvOS is special-cased because its canonical identifier is "appletvos".
  def self.platformIdentifierForVersionMinLoadCommand(loadCommandName)
    case loadCommandName.downcase
    when "lc_version_min_tvos"
      return "appletvos"
    when /^lc_version_min_(.*)$/
      return $1
    else
      raise "Unknown platform for load command: #{loadCommandName}"
    end
  end
  # Cache lookup by canonical identifier (e.g. "iphoneos"); nil if unknown.
  def self.platformForIdentifer(ident)
    return self.allKnownPlatforms[ident]
  end
  # :: Gem:Version. Which OS version introduced support for OS Variants?
  def osVariantsIntroducedInVersion()
    result = OS_VARIANTS_PLATFORM_VERSION[identifier]
    raise "Could not find OS Variants version for platform #{identifier}" if result.nil?
    return result
  end
  # :: Gem:Version. Which OS version introduced support for this?
  def encryptionFormat2IntroducedInVersion()
    result = ENCRYPTION_FORMAT_2_PLATFORM_VERSION[identifier]
    raise "Could not find encryption format 2 version for platform #{identifier}" if result.nil?
    return result
  end
  # Loads the platform's device_traits.db (SQLite) and materializes one
  # ThinningVariant per DeploymentVariant row. On any DB failure the error is
  # reported as a warning and @thinningVariants is left nil.
  def initialize(platformIdent, mainPath, sdkPath)
    # Record the basic information.
    @identifier = platformIdent
    @mainPath = mainPath
    @sdkPath = sdkPath
    @toolsPath = mainPath + "usr" + "bin"
    # Determine the path of the device traits database that defines known devices for the platform.
    deviceTraitsDBPath = mainPath + "usr" + "standalone" + "device_traits.db"
    begin
      SQLite3::Database.new(deviceTraitsDBPath.to_s, { readonly: true , results_as_hash: true } ) do |db|
        deviceTraitsByID = {} # :: [Int: Row]
        devicesByTarget = {} # :: [String: DeviceType]
        deviceTraitsIDByTarget = {} # :: [String: Int]
        @thinningVariants = [] # :: [ThinningVariant]
        # Load the traits
        db.execute("select * from DeviceTraits") do |row|
          assert(deviceTraitsByID[row['DeviceTraitSetID']].nil?, "#{self.identifier}: conflicting DeviceTraitSetIDs for:\nOld: #{row['DeviceTraitSetID']}\nNew: #{row}")
          deviceTraitsByID[row['DeviceTraitSetID']] = row
        end
        # Devices
        db.execute("select * from Devices") do |row|
          target = row['Target']
          productType = row['ProductType']
          displayName = row['ProductDescription']
          devicesByTarget[target] = DeviceType.new(target, productType, displayName).freeze
          deviceTraitsIDByTarget[target] = row['DeviceTraitSet']
        end
        # Lowest deployment target whose rows claim a Swift runtime; nil if none do.
        @deploymentTargetWithSwiftRuntime = db.execute("select * from DeploymentTarget").select {|row| row["HasSwiftRuntime"] == 1 }.collect {|row| Gem::Version.new(row["TargetVersion"]) }.min
        # [ThinningVariant]
        db.execute("select * from DeploymentVariant") do |row|
          deploymentTarget = Gem::Version.new(row['DeploymentTarget'])
          target = row['DeviceTarget']
          device = devicesByTarget[target]
          product = device.productType
          # Filter out historical versions
          if platformIdent == "watchos"
            skip0 = product =~ /^Watch1,/ && (deploymentTarget < Gem::Version.new("4") || deploymentTarget >= Gem::Version.new("5"))
            skip1 = product =~ /^Watch[2-4],/ && deploymentTarget < Gem::Version.new("5.2")
            if skip0 || skip1
              log { "Skipping historical Watch: #{row}" }
              next
            end
          end
          # Pre-OS-variant deployment targets fall back to the device's default
          # trait set and don't pass the deployment target to assetutil.
          traitsRow = nil
          passDeploymentTargetToAssetutil = nil
          if deploymentTarget < osVariantsIntroducedInVersion
            traitsRow = deviceTraitsByID[deviceTraitsIDByTarget[target]]
            passDeploymentTargetToAssetutil = false
          else
            traitsRow = deviceTraitsByID[row['DeviceTraitSet']]
            passDeploymentTargetToAssetutil = true
          end
          architecture = row['TargetArchitecture'] || traitsRow['PreferredArchitecture']
          hasSwiftRuntime = @deploymentTargetWithSwiftRuntime && deploymentTarget >= @deploymentTargetWithSwiftRuntime
          supportsEmbeddedWatchApp = traitsRow['ArtworkDeviceIdiom'] == "phone" && deploymentTarget < OPTIONS.iOSVersionWithWatchThinning
          supportsEmbeddedUniversalWatchApp = supportsEmbeddedWatchApp && deploymentTarget >= Gem::Version.new("12")
          supportsEncryptionFormat2 = if OPTIONS.supportEncryptionFormat2 then deploymentTarget >= encryptionFormat2IntroducedInVersion else false end
          traits = ThinningTraits.new(deploymentTarget,
            passDeploymentTargetToAssetutil,
            architecture,
            traitsRow['ArtworkDeviceIdiom'],
            (traitsRow['ArtworkHostedIdioms'] || "").split(","),
            traitsRow['ArtworkScaleFactor'],
            traitsRow['ArtworkDeviceSubtype'],
            traitsRow['ArtworkDisplayGamut'],
            traitsRow['ArtworkDynamicDisplayMode'],
            traitsRow['DevicePerformanceMemoryClass'],
            traitsRow['GraphicsFeatureSetClass'],
            (traitsRow['GraphicsFeatureSetFallbacks'] || "").split(":"),
            (traitsRow['FeaturesToRemove'] || "").split(":"),
            supportsEmbeddedWatchApp,
            supportsEmbeddedUniversalWatchApp,
            hasSwiftRuntime,
            supportsEncryptionFormat2,
            nil # coalescingGroup
          )
          variant = ThinningVariant.new(device, traits)
          # Tag the traits with the first command-line coalescing group that
          # matches this variant, if any.
          OPTIONS.coalescingGroups.each_with_index do |g,i|
            if ThinningVariantCLISpec.any_matches(g, variant)
              log { "Matched variant to coalescing group #{i}: variant = #{variant}, group = #{g}" }
              traits.coalescingGroup = i
              break
            end
          end
          variant.traits = traits.freeze
          @thinningVariants << variant.freeze
        end
      end
    rescue => exc
      # Degrade to a warning: the platform object still exists, but with no
      # thinning variants (see NOTE in allKnownPlatforms).
      EmitWarning(__LINE__, "Couldn't load device traits database for platform '#{platformIdent}' from #{deviceTraitsDBPath.to_s.shellescape}:\n#{exc.to_log_s}")
      return nil
    end
    log { "Platform thinningVariants for '#{platformIdent}'" }
    for v in thinningVariants
      log { "#{v}".indent }
    end
  end
  def to_s
    return identifier
  end
  # Platforms sort by identifier.
  def <=>(other)
    self.to_s <=> other.to_s
  end
end
# Load any platform information we have up-front. We do this by asking for the mapping.
platforms = Platform.allKnownPlatforms()
FatalError(__LINE__, "couldn't find any platforms at all in #{OPTIONS.platformsDir}") if platforms.empty?
# At this point, the 'inputPath' option always refers to an unpacked IPA. We process it using the option
# dictionary we've built up. This is the script's main entry point; all of the real work happens inside ProcessIPA.
ProcessIPA(OPTIONS.inputPath, OPTIONS)
# We're done; we specifically do not flatten the IPA back up, because it still needs to be signed (which is the caller's responsibility).
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment