Created
October 1, 2012 15:37
-
-
Save vsajip/3812561 to your computer and use it in GitHub Desktop.
Snippet of code which produces YAML metadata
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#
# Copyright (C) 2012 Vinay Sajip. All rights reserved.
#
def produce_output(self, data, stream=None):
    """Convert a dict of setup()-style metadata into a YAML description.

    Builds a ``result`` dict with ``version``, ``metadata`` and ``source``
    keys (plus optional ``requirements``, ``scripts``, ``extensions``,
    ``features``, ``libraries``, ``registry``, ``custom-commands``,
    ``external-locations``), dumps it as YAML to *stream* (default
    ``sys.stdout``), appends the same structure as a single JSON comment
    line, and returns the result dict.

    :param data: mapping of distutils/setuptools metadata keys to values
                 (e.g. as collected from a setup() call) -- assumed
                 dict-like; keys are the usual setup() keyword names.
    :param stream: writable text stream; defaults to ``sys.stdout``.
    :return: the assembled result dictionary.
    :raises UnicodeError: re-raised (after logging) if the YAML or JSON
        text cannot be written to *stream*.
    """
    self.data = data
    metadata = {}
    # 'source' initially maps manifest categories (e.g. 'include') to
    # lists of file paths; populated below.
    files = {
    }
    result = {
        'version': 1,
        'metadata': metadata,
        'source': files,
    }
    # Simple string-valued metadata: copy under the same key, coercing
    # bytes (decoded as UTF-8) and non-string values to text.
    for name in (
        'name', 'version', 'author', 'maintainer', 'license',
        'classifiers', 'keywords'):
        if name in data:
            s = data[name]
            if isinstance(s, bytes):
                s = s.decode('utf-8')
            elif not isinstance(s, string_types):
                s = text_type(s)
            metadata[name] = s
            if name == 'version':
                # Also record a normalised form of the version string.
                metadata['normalized-version'] = suggest_version(s)
    # Metadata whose key is renamed on the way out (underscores become
    # hyphens; 'description'/'long_description' swap to the
    # summary/description convention).
    for old_name, new_name in (
        ('author_email', 'author-email'),
        ('maintainer_email', 'maintainer-email'),
        ('description', 'summary'),
        ('long_description', 'description'),
        ('url', 'home-page'),
        ('download_url', 'download-url'),
        ('test_suite', 'test-suite'),
        ('test_loader', 'test-loader'),
        ('use_2to3', 'use-2to3'),
        ('package_dir', 'package-dirs'),
        ('platforms', 'platform')):
        if old_name in data:
            s = data[old_name]
            if old_name == 'package_dir':
                # package_dir may legitimately be None; normalise to {}.
                if s is None:
                    s = {}
            elif isinstance(s, bytes):
                s = s.decode('utf-8')
            elif not isinstance(s, (string_types, list, dict)):
                s = text_type(s)
            metadata[new_name] = s
    # source files
    fn = os.path.join(self.projdir, 'MANIFEST.in')
    # Seed 'files' with the default manifest entries, then overlay
    # whatever MANIFEST.in (preferred) or a hand-written MANIFEST adds.
    self.default_manifest(files)
    if os.path.exists(fn):
        self.read_manifest(fn, files)
    else:
        # See if MANIFEST exists and is not generated
        fn = os.path.join(self.projdir, 'MANIFEST')
        if os.path.exists(fn):
            with open(fn) as f:
                lines = f.readlines()
            for i, line in enumerate(lines):
                line = line.strip()
                #if i == 0 and line == ('# file GENERATED by distutils, '
                #                       'do NOT edit\n'):
                #    break
                if line.startswith('#'):
                    continue
                line = os.path.normpath(line)
                files.setdefault('include', []).append(line)
    # core dependencies
    # NOTE(review): 'deps' is assigned but never used below.
    deps = []
    have_reqs = any(d in data for d in ('install_requires',
                                        'setup_requires',
                                        'tests_require',
                                        'extras_require'))
    if have_reqs:
        result['requirements'] = reqs = {}
        if 'install_requires' in data:
            reqs['install'] = self.get_dependencies(
                data['install_requires'])
        if 'setup_requires' in data:
            reqs['setup'] = self.get_dependencies(
                data['setup_requires'])
        if 'tests_require' in data:
            reqs['test'] = self.get_dependencies(
                data['tests_require'])
        if 'extras_require' in data:
            # Sorted for deterministic output ordering.
            reqs['extras'] = extras = {}
            for extra_name, rlist in sorted(data['extras_require'].items()):
                extras[extra_name] = self.get_dependencies(rlist)
    if 'dependency-links' in data:
        result['external-locations'] = data['dependency-links']
    # scripts
    if 'scripts' in data:
        # Scripts are also source files, so add them to the manifest's
        # 'include' list.
        result['scripts'] = slist = data['scripts']
        for s in slist:
            files.setdefault('include', []).append(os.path.normpath(s))
    # extension modules
    if 'ext_modules' in data:
        result['extensions'] = extns = {}
        do_extensions(data['ext_modules'], extns)
    # features
    if 'features' in data:
        # Each feature value is assumed to be a setuptools Feature-like
        # object exposing .args and .kwargs -- TODO confirm.
        result['features'] = features = {}
        for k, v in data['features'].items():
            args = v.kwargs
            if len(v.args) > 0:
                # First positional arg is the feature description.
                args['description'] = v.args[0]
            extns = args.pop('ext_modules', None)
            if extns:
                args['extensions'] = d = {}
                do_extensions(extns, d)
            features[k] = args
    if 'libraries' in data:
        result['libraries'] = data['libraries']
    # data files
    if 'data_files' in data and data['data_files'] is not None:
        # NOTE(review): this rebinds 'files' (previously the manifest
        # dict) to the new data-files list; the manifest dict is still
        # reachable via result['source'].
        result.setdefault('source', {})['data-files'] = files = []
        for t in data['data_files']:
            # Entries are either (target-dir, [sources]) pairs or plain
            # path strings.
            if (isinstance(t, (list, tuple)) and len(t) == 2 and
                isinstance(t[1], (list, tuple))):
                t = list(t)
            else:
                assert isinstance(t, string_types)
                t = os.path.normpath(t)
            files.append(t)
    # entry points
    if 'entry_points' in data:
        ep = data['entry_points']
        if not isinstance(ep, dict):
            # convert ini-section to dict
            # Scans the ini text with the SECTION/ITEM regexes: for each
            # [section] header, collects 'name = value' items until the
            # next section (or the end of the text) is reached.
            eps = ep
            ep = {}
            m = SECTION.search(eps)
            while m:
                k = m.group(1)
                eps = eps[m.end():]
                m = ITEM.search(eps)
                while m:
                    ep.setdefault(k, []).append('%s = %s' % (m.group(1),
                                                             m.group(3)))
                    eps = eps[m.end():]
                    m = ITEM.search(eps)
                    ms = SECTION.search(eps)
                    if ms and m and ms.start() < m.start():
                        # A new section begins before the next item:
                        # hand its match to the outer loop.
                        m = ms
                        break
            # console/gui scripts are hoisted out of the generic entry
            # point registry into a dedicated 'scripts' structure.
            scripts = ep.pop('console_scripts', None)
            if scripts:
                ep.setdefault('scripts', {})['console'] = scripts
            scripts = ep.pop('gui_scripts', None)
            if scripts:
                ep.setdefault('scripts', {})['gui'] = scripts
            if ep:
                result['registry'] = ep
    # custom commands
    if 'cmdclass' in data:
        # Only the command names are recorded, sorted for determinism.
        result['custom-commands'] = sorted(data['cmdclass'])
    # output
    if stream is None:
        stream = sys.stdout
    stream.write('#\n# Auto-generated file - bear in mind if editing\n#\n')
    s = yaml.dump(result, Dumper=CustomDumper, default_flow_style=False)
    try:
        stream.write(s)
    except UnicodeError:
        logger.exception('%s: failed to write string %r to %s',
                         sys.argv, s, stream)
        raise
    try:
        # Append the same data as a single JSON line, commented out so
        # the file remains valid YAML.
        s = json.dumps(result, ensure_ascii=False)
        stream.write('\n#%s\n'% s)
    except UnicodeError as e:
        logger.exception('%s: failed to write json string %r to %s',
                         sys.argv, s, stream)
        raise
    return result
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment