Last active
August 9, 2016 16:16
-
-
Save luser/99de102e16cbeec3f5a0eec4d9b2fc9c to your computer and use it in GitHub Desktop.
convert NSS makefiles
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
*~
*.json
*.pyc
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Scrape build output | |
make USE_64=1 NSPR_INCLUDE_DIR=/usr/include/nspr --dry-run > /tmp/nss.dry-run.log | |
python scrapemakeoutput.py /build/nss /tmp/nss.dry-run.log > data.json | |
# Get all variables used in manifest.mn | |
find . -name manifest.mn | xargs egrep -h '^(\w+)\s*=' | sed 's/^\(\w\+\)[ \t]*=.*/\1/' | sort | uniq -c | sort -n > /tmp/all-nss-vars | |
# Scrape makefiles (run in gecko srcdir) | |
./mach python /build/nssmakefiles/nssmakefiles.py /build/nss `pwd` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
import json | |
import os | |
import itertools | |
import sys | |
def get_sources(f): | |
data = json.load(open(f, 'rb')) | |
for dir,v in data.iteritems(): | |
for s in itertools.chain.from_iterable(v.get(srcs, []) for srcs in ('CSRCS','CPPSRCS','ASFILES','sources')): | |
print os.path.join(dir, s) | |
if __name__ == '__main__': | |
get_sources(sys.argv[1]) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
from __future__ import print_function | |
import argparse | |
import json | |
import platform | |
import os | |
import sys | |
from pymake import data, parser, parserdata, process | |
# Path to pymake's make.py inside the gecko srcdir; filled in by main().
makepypath = ''

uname = platform.uname()

# Make variables forced onto every parsed Makefile so the NSS build
# system doesn't have to shell out (uname, whoami, ...) while parsing.
makevars = {
    'OS_ARCH': uname[0],
    # Keep only major.minor of the kernel release, e.g. '3.13'.
    'OS_RELEASE': '.'.join(uname[2].split('.')[:2]),
    'CC_IS_GCC': '0',
    # Can't set CC_IS_CLANG= because NSS Makefiles use `ifdef` on it.
    'GCC_VERSION': '',
    'BUILD_TREE': 'objdir',
    'PLATFORM': 'platform',
    'USERNAME': 'user',
    # freebl does this awful recursive thing
    # TODO: somehow munge this properly
    'FREEBL_CHILD_BUILD': '1',
}

# Per-host-OS compiler settings.  `sysname` is used to name the output
# JSON file in main().
if platform.system() == 'Darwin':
    sysname = 'mac'
    makevars['CC'] = 'clang'
    makevars['CC_NAME'] = 'clang'
    makevars['CC_IS_CLANG'] = '1'
    # NOTE(review): CC_IS_GCC is also '1' here alongside CC_IS_CLANG --
    # presumably because the NSS Makefiles treat clang as gcc-compatible;
    # confirm against coreconf.
    makevars['CC_IS_GCC'] = '1'
elif platform.system() == 'Linux':
    sysname = 'linux'
    makevars['CC'] = 'gcc'
    makevars['CC_NAME'] = 'gcc'
    makevars['CC_IS_GCC'] = '1'
    # Hard-coded toolchain version; adjust to match the local gcc.
    makevars['GCC_VERSION'] = '4.8.4'
elif platform.system() == 'Windows':
    sysname = 'win'
    makevars['CC'] = 'cl'
    makevars['CC_NAME'] = 'cl'
    makevars['OS_ARCH'] = 'WINNT'
def parse_nss_makefile(path, topsrcdir, defines=None, cflags=None, cpu_arch='x86_64'):
    '''Parse the NSS Makefile at `path` with pymake and scrape its build
    variables, recursing into any DIRS it lists.

    Returns a dict mapping each Makefile's directory (relative to
    `topsrcdir`, forward-slashed) to a dict of interesting variables
    (CSRCS, DEFINES, LIBRARY_NAME, DIRS, ...).

    `defines`/`cflags` are the value sets already present in the parent
    Makefile; only values not in them are recorded at this level.
    NOTE(review): `cpu_arch` is accepted but never used here -- CPU
    settings come from the module-level `makevars` instead.
    '''
    relpath = os.path.relpath(os.path.dirname(path), topsrcdir).replace('\\', '/')
    defines = set() if defines is None else defines
    cflags = set() if cflags is None else cflags
    stmts = parserdata.StatementList()
    # Add some overrides so the NSS build system doesn't shell out so much.
    # Each makevar becomes an exported command-line variable assignment.
    for i, (vname, val) in enumerate([('PWD', os.path.dirname(path))] + makevars.items()):
        t = '='
        vnameexp = data.Expansion.fromstring(vname, "Command-line argument")
        stmts.append(parserdata.ExportDirective(vnameexp, concurrent_set=True))
        stmts.append(parserdata.SetVariable(vnameexp, token=t,
            value=val, valueloc=parserdata.Location('<command-line>', i, len(vname) + len(t)),
            targetexp=None, source=data.Variables.SOURCE_COMMANDLINE))
    # Parse the Makefile in `path`.
    makefile = data.Makefile(restarts=0,
                             make='%s %s' % (sys.executable.replace('\\', '/'), makepypath.replace('\\', '/')),
                             makeflags='',
                             makeoverrides='',
                             workdir=os.path.dirname(path),
                             context=process.getcontext(1),
                             env=dict(os.environ),
                             makelevel=0,
                             targets=[],
                             keepgoing=False,
                             silent=False,
                             justprint=False)
    stmts.execute(makefile)
    makefile.include(path)
    makefile.finishparsing()
    md = {}
    def eval_mkvar(var, split=False):
        # Resolve make variable `var` to its fully-expanded string, or a
        # whitespace-split list when `split` is true.  Unset/empty
        # variables yield None ([] when split).
        _f, _s, val = makefile.variables.get(var)
        if val:
            vs = val.resolvestr(makefile, makefile.variables).strip()
            return vs.split() if split else vs
        return [] if split else None
    mk_dir = os.path.dirname(path)
    # Get a list of all OBJS targets so we can check sources against them.
    obj_targets = dict((os.path.splitext(os.path.basename(o))[0], makefile.gettarget(o)) for o in eval_mkvar('OBJS', True))
    # Now scrape out some variables:
    # 1) These variables are all files, and should exist in the srcdir.
    #    We might have to find them with vpath.
    for var in ('CSRCS', 'CPPSRCS', 'ASFILES', 'EXPORTS', 'PRIVATE_EXPORTS'):
        val = eval_mkvar(var)
        if val:
            items = []
            for item in val.split():
                if os.path.isfile(os.path.join(mk_dir, item)):
                    items.append(item)
                else:
                    # Not next to the Makefile; search the vpath.
                    for d in makefile.getvpath(item):
                        if os.path.isfile(os.path.join(mk_dir, d, item)):
                            items.append(os.path.join(d, item))
                            break
                    else:
                        # certdata.c is generated at build time, so it is
                        # allowed to be missing from the srcdir.
                        if item == 'certdata.c':
                            md['generated_sources'] = [item]
                        else:
                            raise Exception('Item in %s %s not found: %s' % (path, var, item))
                # See if there are target-specific variable overrides for this
                # source file.
                if 'EXPORTS' not in var:
                    base = os.path.splitext(os.path.basename(item))[0]
                    if base not in obj_targets:
                        # NOTE(review): this only prints a diagnostic --
                        # the lookup on the next line will still raise
                        # KeyError when `base` is missing.
                        print('%s target missing (%s)' % (item, base))
                        print('obj_targets:\n========\n%s\n========' % '\n'.join(obj_targets.keys()))
                    t = obj_targets[base]
                    for name, flavor, source, val in t.variables:
                        if name != 'CFLAGS':
                            raise Exception('Unhandled variable: %s' % name)
                        if 'overrides' not in md:
                            md['overrides'] = {}
                        if item not in md['overrides']:
                            md['overrides'][item] = {}
                        md['overrides'][item][name] = val
            md[var] = items
    # 2) These variables are all lists.
    for var in ('REQUIRES', 'EXTRA_LIBS', 'EXTRA_SHARED_LIBS', 'SHARED_LIBRARY_DIRS', 'INCLUDES', 'OS_LIBS'):
        val = eval_mkvar(var)
        if val:
            md[var] = val.split()
    # 3) DEFINES and CFLAGS get special handling:
    #    only include values not present in the parent.
    this_defines = []
    this_cflags = []
    for var, current, this in [('DEFINES', defines, this_defines),
                               ('CFLAGS', cflags, this_cflags)]:
        val = eval_mkvar(var)
        if val:
            dl = val.split()
            # Order-preserving filter of values not inherited from the parent.
            this[:] = [d for d in dl if d in (set(dl) - current)]
            if this:
                md[var] = this
    # 4) These variables are all single-items.
    for var in ('MODULE', 'LIBRARY_NAME', 'MAPFILE', 'MAPFILE_SOURCE', 'LIBRARY_VERSION', 'USE_STATIC_LIBS'):
        val = eval_mkvar(var)
        if val:
            md[var] = val
    # 5) These variables all get turned into pathnames by the build system,
    #    so just take the basename.
    for var in ('LIBRARY', 'SHARED_LIBRARY', 'PROGRAM'):
        bin = eval_mkvar(var)
        if bin:
            md[var] = os.path.basename(bin)
    # 6) PROGRAMS is a list of pathnames.
    progs = eval_mkvar('PROGRAMS')
    if progs:
        md['PROGRAMS'] = [os.path.basename(p) for p in progs.split()]
    alldata = {relpath: md}
    # 7) Get DIRS and traverse any directories listed.
    flavor, source, dirs = makefile.variables.get('DIRS')
    if dirs:
        dirs_vals = dirs.resolvesplit(makefile, makefile.variables)
        md['DIRS'] = dirs_vals
        parent_dir = os.path.dirname(path)
        for d in dirs_vals:
            # NOTE(review): `defines or set(this_defines)` passes the
            # parent's set when non-empty, otherwise this level's values;
            # it does not accumulate both -- confirm that is intended.
            alldata.update(parse_nss_makefile(os.path.join(parent_dir, d, 'Makefile'), topsrcdir, defines or set(this_defines), cflags or set(this_cflags)))
    return alldata
def main():
    '''Entry point: parse the NSS Makefile tree rooted at nss_srcdir and
    dump the scraped variable data as JSON under /tmp.'''
    parser = argparse.ArgumentParser(description='Parse NSS makefiles')
    parser.add_argument('nss_srcdir', type=str,
                        help='The NSS source directory')
    parser.add_argument('gecko_srcdir', type=str,
                        help='A gecko source directory')
    parser.add_argument('--cpu-arch', type=str, default='x86_64',
                        help='Build for this CPU architecture')
    args = parser.parse_args()
    # Fold the CPU choice into the forced make variables.
    global makevars
    if args.cpu_arch == 'x86_64':
        makevars['USE_64'] = '1'
    if platform.system() == 'Darwin' and args.cpu_arch == 'x86_64':
        # this is always i386 on mac...
        makevars['CPU_ARCH'] = 'i386'
        makevars['OS_TEST'] = 'i386'
    else:
        makevars['CPU_ARCH'] = args.cpu_arch
        makevars['OS_TEST'] = args.cpu_arch
    global makepypath
    # pymake's make.py is substituted for $(MAKE) in parsed Makefiles.
    makepypath = os.path.abspath(os.path.join(args.gecko_srcdir, 'build/pymake/make.py'))
    data = parse_nss_makefile(os.path.join(args.nss_srcdir, 'Makefile'), args.nss_srcdir)
    # NOTE(review): `sysname` is only defined for Darwin/Linux/Windows
    # hosts; any other OS raises NameError here.  The file object is
    # also never explicitly closed.
    json.dump(data, open('/tmp/nss-makefile-data-%s-%s.json' % (sysname, args.cpu_arch), 'wb'))
if __name__ == '__main__':
    main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
import fileinput | |
import json | |
import os | |
import re | |
import sys | |
from collections import defaultdict | |
# Toolchain command names and link-line helpers, chosen once per host OS.
if sys.platform.startswith('win'):
    MAKE, CC, CXX, AS, LD = 'mozmake', 'cl', 'cl', 'ml64.exe', 'link'

    def get_binary(link_bits):
        """Return the output binary named on an MSVC link/compile line,
        or None when no -OUT:/-Fe option is present."""
        prefixes = (('-OUT:', 5), ('-Fe', 3))
        for bit in link_bits:
            for prefix, skip in prefixes:
                if bit.startswith(prefix):
                    return os.path.basename(bit[skip:])
        return None

    def normpath(path):
        """Rewrite an MSYS-style /c/... path into the c:/... form."""
        return re.sub('^/(.)/', r'\1:/', path)
else:
    MAKE, CC, CXX, AS, LD = 'make', 'gcc', 'g++', 'gcc', 'gcc'

    def get_binary(link_bits):
        """Return the output binary following -o on a gcc command line."""
        out_index = link_bits.index('-o') + 1
        return os.path.basename(link_bits[out_index])

    def normpath(path):
        """Paths need no translation on non-Windows hosts."""
        return path
def main():
    '''Read `make --dry-run` output (srcdir as argv[1]; log file(s) as
    the remaining arguments, or stdin) and print a JSON map of
    directory -> compile/link command lines, source files and binaries.'''
    srcdir = os.path.abspath(sys.argv.pop(1))
    # Stack of srcdir-relative directories make has entered; '.' is root.
    dirs = ['.']
    data = defaultdict(lambda: defaultdict(list))
    i = iter(fileinput.input())
    cwd = '.'
    abs_cwd = srcdir
    for line in i:
        line = line.rstrip()
        # Re-join backslash-continued lines into a single logical line.
        # NOTE(review): `i.next()` is Python 2 only.
        while line.endswith('\\'):
            line = line.rstrip('\\') + i.next()
        line = line.strip()
        if line.startswith(MAKE):
            # Track make's directory changes so each command is
            # attributed to the right directory.
            if 'Entering directory' in line:
                # The directory is quoted with a backtick or an
                # apostrophe depending on make version/locale.
                if '`' in line:
                    q = line.index('`')
                else:
                    q = line.index("'")
                abs_cwd = line[q+1:-1]
                cwd = os.path.relpath(abs_cwd, srcdir).replace('\\', '/')
                dirs.append(cwd)
            elif 'Leaving directory' in line:
                dirs.pop()
                cwd = dirs[-1]
        elif line.startswith(CC) or line.startswith(CXX) or line.startswith(AS):
            bits = line.split()
            # Compiler driver: if the last argument is a source file this
            # is a compile, otherwise the driver is being used to link.
            src = bits[-1].strip('"')
            if src.endswith('.c') or src.endswith('.cc') or src.endswith('.s') or src.endswith('.asm'):
                data[cwd]['compiles'].append(line)
                # Strip an objdir prefix (".../<platform>.OBJ/") down to
                # the path below it.
                obj = src.find('.OBJ')
                if obj != -1:
                    src = src[src.find('/', obj)+1:]
                src = normpath(src)
                if os.path.isabs(src):
                    src = os.path.relpath(src, abs_cwd)
                data[cwd]['sources'].append(src.replace('\\', '/'))
            else:
                data[cwd]['links'].append(line)
                data[cwd]['binaries'].append(get_binary(bits))
        elif LD and line.startswith(LD):
            # Dedicated linker lines (e.g. 'link' on Windows).
            data[cwd]['links'].append(line)
            bits = line.split()
            data[cwd]['binaries'].append(get_binary(bits))
    print json.dumps(data, indent=2)
if __name__ == '__main__':
    main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
import itertools | |
import json | |
import os | |
import re | |
import shutil | |
import sys | |
from collections import OrderedDict, defaultdict | |
# License header written at the top of every generated gyp file.
BOILERPLATE = '''\
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''
def write_gyp_file(filename, data):
    '''Serialize `data` to `filename` as a gyp file: license boilerplate
    followed by the dict rendered as JSON with double quotes converted
    to the single quotes gyp conventionally uses.'''
    # NOTE(review): binary mode with str writes is fine under this
    # script's Python 2; would need mode 'w' under Python 3.
    with open(filename, 'wb') as f:
        f.write(BOILERPLATE)
        s = json.dumps(data, indent=2, separators=(',', ': '))
        # Swap unescaped double quotes for single quotes; escaped quotes
        # inside string values are left intact.
        f.write(re.sub(r'(?<!\\)"', '\'', s))
def generate_target(target_name, target_type, sources, dependencies, extra=()):
    '''Build an OrderedDict describing one gyp target.

    target_name: the gyp 'target_name' value.
    target_type: 'executable', 'static_library', 'shared_library', 'none'.
    sources/dependencies: lists; omitted from the dict when empty.
    extra: iterable of (key, value) pairs appended in order after the
        standard keys (e.g. ('cflags', [...]), ('actions', [...])).

    Returns the OrderedDict (keys in insertion order for stable output).
    '''
    # Default was a mutable [] -- a shared-state hazard; an empty tuple
    # is equivalent here since `extra` is only iterated.
    target = OrderedDict([
        ('target_name', target_name),
        ('type', target_type),
    ])
    if sources:
        target['sources'] = sources
    if dependencies:
        target['dependencies'] = dependencies
    for k, v in extra:
        target[k] = v
    return target
def generate_copy(files, where):
    '''Return a gyp 'copies' entry that places `files` into the module's
    dist subdirectory named by `where` (e.g. 'public' or 'private').'''
    destination = os.path.join('<(PRODUCT_DIR)/dist/<(module)', where)
    return {'destination': destination,
            'files': files}
def generate_target_gyp(path, relpath, makefile_data, known_modules, all_targets):
    '''Generate the .gyp (and optionally exports.gyp) file for one NSS
    source directory from its scraped Makefile data.

    path: absolute directory to write the gyp file(s) into.
    relpath: directory relative to the NSS srcdir ('.' at the top).
    makefile_data: the scraped-variable dict for this directory.
    known_modules: MODULE names that actually export headers.
    all_targets: accumulator mapping target names to 'gypfile:target'
        strings ('static'/'shared'/'program'/'static_by_dir'); updated
        as a side effect so later directories can depend on earlier ones.

    Returns (gyp_file, module, export_target) where export_target is a
    'relpath/exports.gyp:name' string or None.
    '''
    targets = []
    module = None
    # Extra shared libraries this target links against.
    shlib_deps = [l[2:] for l in makefile_data.get('EXTRA_SHARED_LIBS', []) if l.startswith('-l')]
    # Extra static libraries this target links against.
    static_deps = [os.path.basename(l)[3:-2] for l in makefile_data.get('EXTRA_LIBS', []) if l.endswith('.a')]
    # A few shared libraries in NSS use this to link all of the object
    # files from the listed directories into the shared library.
    shlib_dirs = [all_targets['static_by_dir'][os.path.normpath(os.path.join(relpath, d))] for d in makefile_data.get('SHARED_LIBRARY_DIRS', [])]
    # Check for extra os libs.
    default_os_libs = set(['-lpthread', '-ldl', '-lc'])
    def remap_os_lib(lib):
        # NOTE(review): defined but never called -- presumably superseded
        # by the explicit '-lz' handling further down.
        if lib == '-lz':
            return '<@(zlib_libs)'
        return lib
    os_libs = [l for l in makefile_data.get('OS_LIBS', []) if l not in default_os_libs]
    # Filter out dist paths from INCLUDES because they come from REQUIRES
    # and we handle those separately.
    include_dirs = [i[2:] for i in makefile_data.get('INCLUDES', []) if 'dist' not in i]
    # Some makefiles sneak include paths into CFLAGS...
    include_dirs_set = set(include_dirs)
    include_dirs.extend(f[2:] for f in makefile_data.get('CFLAGS', []) if f.startswith('-I') and 'dist' not in f and f[2:] not in include_dirs_set)
    # Replace escaped quotes and platform-specific bits in defines
    dllbits = {'DLL_SUFFIX': '<(dll_suffix)', 'DLL_PREFIX': '<(dll_prefix)', 'SHLIB_PREFIX': '<(dll_prefix)', 'SHLIB_SUFFIX': '<(dll_suffix)'}
    defines = makefile_data.get('DEFINES', [])
    for i, d in enumerate(defines):
        # Strip the leading '-D' and unescape embedded quotes.
        d = d[2:].replace('\\"', '"')
        if any(d.startswith(s) for s in dllbits.keys()):
            k,_ = d.split('=', 1)
            d = '%s="%s"' % (k, dllbits[k])
        defines[i] = d
    overrides = makefile_data.get('overrides', {})
    sources = sorted(itertools.chain.from_iterable(makefile_data.get(srcs, []) for srcs in ('CSRCS','CPPSRCS','ASFILES')))
    # handle generating certdata.c
    extra_target_bits = []
    if 'generated_sources' in makefile_data:
        assert makefile_data['generated_sources'] == ['certdata.c'], 'Unhandled generated source files: %s' % str(makefile_data['generated_sources'])
        sources.append('<(INTERMEDIATE_DIR)/certdata.c')
        extra_target_bits.append(('actions', [{
            'action_name': 'generate_certdata_c',
            'inputs': [
                'certdata.perl',
                'certdata.txt',
            ],
            'outputs': [
                '<(INTERMEDIATE_DIR)/certdata.c',
            ],
            'action': ['/bin/sh', '-c', 'perl certdata.perl < certdata.txt > <@(_outputs)'],
        }]))
    # special-case gtests.o being in EXTRA_LIBS, put it in sources instead.
    if any(l.endswith('gtests.o') for l in makefile_data.get('EXTRA_LIBS', [])):
        sources.append('<(DEPTH)/external_tests/common/gtests.cc')
    # handle MAPFILE
    mapfile = makefile_data.get('MAPFILE', None)
    mapfile_source = makefile_data.get('MAPFILE_SOURCE', None)
    if mapfile and mapfile_source:
        extra_target_bits.append(('variables', {'mapfile': mapfile_source}))
    if 'MODULE' in makefile_data:
        module = makefile_data['MODULE']
    # Depend only on exports of modules known to actually export headers
    # (NSS Makefiles list many bogus REQUIRES).
    requires = ['<(DEPTH)/exports.gyp:%s_exports' % r for r in (makefile_data.get('REQUIRES', []) + ([module] if module else [])) if r in known_modules]
    if module == 'sectools':
        # special-cased in ruleset.mk
        include_dirs.extend('<(PRODUCT_DIR)/dist/%s/private' % r for r in makefile_data.get('REQUIRES', []) if r in known_modules)
    depends = list(requires)
    if shlib_deps:
        # Take out NSPR libs
        shlib_deps = sorted(set(shlib_deps) - set(['plc4', 'plds4', 'nspr4']))
        if relpath.startswith('cmd/'):
            # Take out the bits from platlibs.gypi
            shlib_deps = sorted(set(shlib_deps) - set(['nssutil3', 'ssl3', 'smime3', 'nss3']))
        depends.extend(all_targets['shared'][l] for l in shlib_deps)
    if static_deps:
        if relpath.startswith('cmd/'):
            # Take out the bits from platlibs.gypi
            if 'USE_STATIC_LIBS' in makefile_data:
                static_deps = sorted(set(static_deps) - set(['smime', 'ssl', 'nss', 'sectool', 'pkcs12', 'pkcs7', 'certhi', 'cryptohi', 'pk11wrap', 'softokn', 'certdb', 'nsspki', 'nssdev', 'nssb', 'freebl', 'dbm', 'pkixtop', 'pkixutil', 'pkixsystem', 'pkixcrlsel', 'pkixmodule', 'pkixstore', 'pkixparams', 'pkixchecker', 'pkixpki', 'pkixtop', 'pkixresults', 'pkixcertsel', 'nss', 'pk11wrap', 'certhi']))
            else:
                static_deps.remove('sectool')
        depends.extend(all_targets['static'][l] for l in static_deps)
    # zlib is linked via its gyp target rather than -lz.
    if '-lz' in os_libs:
        os_libs.remove('-lz')
        depends.append('<(DEPTH)/lib/zlib/zlib.gyp:zlib')
    if os_libs:
        extra_target_bits.append(('libraries', os_libs))
    if 'PROGRAM' in makefile_data:
        targets.append(generate_target(makefile_data['PROGRAM'],
                                       'executable',
                                       sources,
                                       depends,
                                       extra_target_bits))
    elif 'PROGRAMS' in makefile_data:
        # One executable per listed program; a program's sources are the
        # ones whose filenames start with the program name.
        for p in makefile_data['PROGRAMS']:
            progsrc = [s for s in sources if s.startswith(p)]
            assert progsrc, 'Missing source for PROGRAMS'
            targets.append(generate_target(p,
                                           'executable',
                                           progsrc,
                                           depends,
                                           extra_target_bits))
    elif 'LIBRARY_NAME' in makefile_data:
        target_name = makefile_data['LIBRARY_NAME']
        staticlib = 'LIBRARY' in makefile_data
        sharedlib = 'SHARED_LIBRARY' in makefile_data
        static_name = target_name
        # The shared library carries the version suffix (e.g. nss3).
        target_name += makefile_data.get('LIBRARY_VERSION', '')
        if sharedlib and target_name == static_name:
            static_name += '_static'
        # If we have overrides we have to split them out into a
        # separate static_library because gyp doesn't have a way
        # to set per-source-file flags.
        for src, flags in overrides.iteritems():
            sources.remove(src)
            # NOTE(review): `flag` in this message is undefined; a failing
            # assert would raise NameError rather than AssertionError.
            assert flags.keys() == ['CFLAGS'], 'Unhandled flag override %s' % flag
            source_target_name = src.replace('.', '_') + '_lib'
            targets.append(generate_target(source_target_name,
                                           'static_library',
                                           [src],
                                           requires,
                                           [('cflags', [flags['CFLAGS']])]))
            depends.append(source_target_name)
        # freebl is terrible
        if relpath == 'lib/freebl':
            shlib_extra = [('ldflags', ['-Wl,-Bsymbolic'])]
            targets.append(generate_target('freebl',
                                           'static_library',
                                           ['loader.c'],
                                           requires))
            targets.append(generate_target('freebl3',
                                           'shared_library',
                                           ['lowhash_vector.c'],
                                           requires,
                                           [('variables', {'mapfile': 'freebl_hash.def'})]))
        else:
            shlib_extra = []
        if staticlib:
            targets.append(generate_target(static_name,
                                           'static_library',
                                           sources,
                                           depends))
            if sharedlib:
                # Shared library built from the static one's objects.
                targets.append(generate_target(target_name,
                                               'shared_library',
                                               [],
                                               [static_name] + shlib_dirs,
                                               extra_target_bits + shlib_extra))
        else:
            targets.append(generate_target(target_name,
                                           'shared_library',
                                           sources,
                                           depends + shlib_dirs,
                                           extra_target_bits + shlib_extra))
    copies = []
    export_target = None
    # Header exports become 'copies' into the dist public/private dirs.
    for var, where in [('EXPORTS', 'public'),
                       ('PRIVATE_EXPORTS', 'private')]:
        if var in makefile_data:
            copies.append(generate_copy(sorted(makefile_data[var]), where))
    relname = relpath.replace('/', '_')
    if copies:
        export_target_name = relname + '_exports'
        export_target = generate_target(export_target_name, 'none', [], [])
        export_target['copies'] = copies
    if not (targets or copies):
        print 'No targets or copies in ', path
        return None, None, None
    this_gyp = os.path.join('<(DEPTH)', relpath, os.path.basename(relpath) + '.gyp')
    # Record each generated target so later directories can depend on it.
    for target in targets:
        this_target = this_gyp + ':' + target['target_name']
        if target['type'] == 'static_library':
            all_targets['static'][target['target_name']] = this_target
            all_targets['static_by_dir'][relpath] = this_target
        elif target['type'] == 'shared_library':
            all_targets['shared'][target['target_name']] = this_target
        elif target['type'] == 'executable':
            all_targets['program'][target['target_name']] = this_target
    variables = OrderedDict()
    if module:
        variables['module'] = module
    if 'USE_STATIC_LIBS' in makefile_data:
        variables['use_static_libs'] = 1
    if relpath == '.':
        gyp_file = os.path.join(path, 'nss.gyp')
        includes = ['coreconf/config.gypi']
    else:
        gyp_file = os.path.join(path, os.path.basename(path) + '.gyp')
        # Can't use <(DEPTH) in 'includes'.
        depth = '/'.join('..' for _ in relpath.split('/'))
        includes = ['%s/coreconf/config.gypi' % depth]
        # Replicate platlibs.mk for targets in cmd.
        if relpath.startswith('cmd/') and relpath != 'cmd/lib':
            includes.append('%s/cmd/platlibs.gypi' % depth)
    gyp_data = OrderedDict()
    if includes:
        gyp_data['includes'] = includes
    gyp_data['targets'] = targets
    # Directory-wide include dirs and defines go into target_defaults.
    if (include_dirs or defines) and relpath != '.':
        target_defaults = OrderedDict()
        if include_dirs:
            target_defaults['include_dirs'] = include_dirs
        if defines:
            target_defaults['defines'] = defines
        gyp_data['target_defaults'] = target_defaults
    if variables:
        gyp_data['variables'] = variables
    write_gyp_file(gyp_file, gyp_data)
    # write a separate exports.gyp for reasons
    if export_target:
        exports_gyp = os.path.join(path, 'exports.gyp')
        write_gyp_file(exports_gyp, OrderedDict([
            ('includes', [includes[0]]),
            ('targets', [export_target]),
            ('variables', variables)]))
    return gyp_file, module, '%s:%s' % (os.path.join(relpath, 'exports.gyp'), export_target['target_name']) if export_target else None
def iter_dirs(data, path):
    '''Yield `path` and, depth-first in listed order, every directory
    reachable through the 'DIRS' entries in `data`.'''
    pending = [path]
    while pending:
        current = pending.pop()
        yield current
        children = [child if current == '.' else os.path.join(current, child)
                    for child in data[current].get('DIRS', [])]
        # Reverse so pop() visits children in their original order.
        pending.extend(reversed(children))
def main():
    '''Entry point: read scraped Makefile data and emit one .gyp per
    directory, plus top-level nss.gyp and exports.gyp.

    argv[1]: NSS source directory; argv[2]: JSON produced by
    nssmakefiles.py.
    '''
    nss_srcdir = os.path.abspath(sys.argv[1])
    makefile_data = json.load(open(sys.argv[2], 'rb'))
    exports = defaultdict(list)
    # NSS Makefiles have lots of bogus REQUIRES.
    # Only modules that actually export headers count.
    known_modules = set(v['MODULE'] for v in makefile_data.itervalues() if 'MODULE' in v and ('EXPORTS' in v or 'PRIVATE_EXPORTS' in v))
    all_targets = {'static': {}, 'shared': {}, 'program': {}, 'static_by_dir': {}}
    # Walk in DIRS order so all_targets entries exist before any later
    # directory that depends on them.
    for d in iter_dirs(makefile_data, '.'):
        gyp_file, module, export_target = generate_target_gyp(
            os.path.join(nss_srcdir, d),
            d,
            makefile_data[d],
            known_modules,
            all_targets)
        bits = d.split('/')  # NOTE(review): unused.
        if module and export_target:
            exports[module].append(export_target)
    # special-case zlib
    exports['nss'].append('lib/zlib/exports.gyp:lib_zlib_exports')
    # Top-level nss.gyp: one 'nss_all' meta-target depending on every
    # shared library and program generated above.
    write_gyp_file(os.path.join(nss_srcdir, 'nss.gyp'),
                   OrderedDict([
                       ('includes', ['coreconf/config.gypi']),
                       ('targets', [
                           OrderedDict([
                               ('target_name', 'nss_all'),
                               ('type', 'none'),
                               # TODO: separate out libs and binaries, and also
                               # try to separate out tests into a separate target.
                               # maybe even stick them into separate gyp files?
                               ('dependencies', sorted(all_targets['shared'].values()) + sorted(all_targets['program'].values())),
                           ])
                       ]),
                   ]))
    # Top-level exports.gyp: a <module>_exports target per module, each
    # exposing the module's dist include dir to dependents.
    write_gyp_file(os.path.join(nss_srcdir, 'exports.gyp'),
                   OrderedDict([
                       ('includes', ['coreconf/config.gypi']),
                       ('targets', [OrderedDict([
                           ('target_name', module + '_exports'),
                           ('type', 'none'),
                           ('direct_dependent_settings', {
                               'include_dirs': ['<(PRODUCT_DIR)/dist/%s/public' % module],
                           }),
                           ('dependencies', sorted(deps)),
                       ]) for module, deps in exports.iteritems()]),
                   ]))
if __name__ == '__main__':
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment