@tommeier
Created August 29, 2014 01:25
oclint-json-compilation-database with parallel execution
#!/usr/bin/env python
# Test run with parallel
import os
import platform
import json
import argparse
import re
import subprocess
import sys
import multiprocessing

def run_oclint_command(oclint_command):
    print '-------------------------- OCLint Command --------------------------'
    print oclint_command
    print '--------------------------------------------------------------------'
    exit_code = subprocess.call(oclint_command, shell=True)
    return exit_code

if __name__ == '__main__':
    OCLINT_BIN_FOLDER = os.path.dirname(os.path.abspath(__file__))
    OCLINT_BIN = OCLINT_BIN_FOLDER + os.sep + "oclint"
    if platform.system() == "Windows":
        OCLINT_BIN += ".exe"
    CURRENT_WORKING_DIRECTORY = os.getcwd()
    JSON_COMPILATION_DATABASE = CURRENT_WORKING_DIRECTORY + os.sep + "compile_commands.json"

    arg_parser = argparse.ArgumentParser(description='OCLint for JSON Compilation Database (compile_commands.json)')
    arg_parser.add_argument("-v", action="store_true", dest="invocation", help="show invocation command with arguments")
    arg_parser.add_argument('-debug', '--debug', action="store_true", dest="debug", help="invoke OCLint in debug mode")
    arg_parser.add_argument('-i', '-include', '--include', action='append', dest='includes', help="extract files matching pattern")
    arg_parser.add_argument('-e', '-exclude', '--exclude', action='append', dest='excludes', help="remove files matching pattern")
    arg_parser.add_argument('oclint_args', nargs='*', help="arguments that are passed to OCLint invocation")
    args = arg_parser.parse_args()

    def get_source_path(file_attr, dir_attr):
        if file_attr.startswith(os.sep):
            return file_attr
        elif dir_attr.endswith(os.sep):
            return dir_attr + file_attr
        else:
            return dir_attr + os.sep + file_attr

    def source_exist_at(path):
        return os.path.isfile(path)

    def source_list_inclusion_filter(source_list, inclusion_filter):
        filtered_list = []
        for path in source_list:
            if re.search(inclusion_filter, path):
                filtered_list.append(path)
        return filtered_list

    def source_list_exclusion_filter(source_list, exclusion_filter):
        filtered_list = []
        for path in source_list:
            if not re.search(exclusion_filter, path):
                filtered_list.append(path)
        return filtered_list

    if not source_exist_at(OCLINT_BIN):
        print "Error: OCLint executable file not found."
        sys.exit(99)
    if not source_exist_at(JSON_COMPILATION_DATABASE):
        print "Error: compile_commands.json not found at current location."
        sys.exit(98)

    compilation_database = json.load(open(JSON_COMPILATION_DATABASE))
    source_list = []
    for file_item in compilation_database:
        file_path = file_item["file"]
        if not platform.system() == "Windows":
            file_path = get_source_path(file_item["file"], file_item["directory"])
        if source_exist_at(file_path) and not file_path in source_list:
            source_list.append(file_path)

    if args.includes:
        for inclusion_filter in args.includes:
            source_list = source_list_inclusion_filter(source_list, inclusion_filter)
    if args.excludes:
        for exclusion_filter in args.excludes:
            source_list = source_list_exclusion_filter(source_list, exclusion_filter)

    source_paths = '"' + '" "'.join(source_list) + '"'

    oclint_arguments = ''
    if args.oclint_args:
        oclint_arguments = ' ' + ' '.join(args.oclint_args)
    debug_argument = ''
    if args.debug:
        debug_argument = ' -debug'

    oclint_invocation_calls = []
    for path in source_list:
        oclint_invocation = OCLINT_BIN + debug_argument + oclint_arguments + ' "' + path + '"'
        oclint_invocation_calls.append(oclint_invocation)

    pool = multiprocessing.Pool(None)  # use all CPUs
    results = []
    r = pool.map_async(run_oclint_command, oclint_invocation_calls, callback=results.append)
    r.wait()  # Wait on the results
    results = sorted(results[0])
    print "All results:"
    print results
    worst_result = results[-1]
    # Todo: Munge all results and output into one?
    sys.exit(worst_result)
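
A note on the result handling: the map_async callback receives the whole list of per-file exit codes as a single element, so results[0] is that list and the last entry of its sorted copy is the worst exit code. The same aggregation can be written more directly with the blocking pool.map; a minimal sketch (a hypothetical simplification, not part of the gist):

    import multiprocessing
    import subprocess
    import sys

    def run_oclint_command(oclint_command):
        # Run one OCLint invocation and return its exit code.
        return subprocess.call(oclint_command, shell=True)

    def worst_exit_code(oclint_invocation_calls):
        # pool.map blocks until every command has finished and returns the
        # exit codes in order, so no callback or explicit wait is needed.
        pool = multiprocessing.Pool()  # defaults to one worker per CPU
        exit_codes = pool.map(run_oclint_command, oclint_invocation_calls)
        pool.close()
        pool.join()
        return max(exit_codes) if exit_codes else 0

    # Equivalent to the sorted(results[0])[-1] step above: propagate the
    # worst per-file exit code as the script's own exit status.
    # sys.exit(worst_exit_code(oclint_invocation_calls))
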
@plasticine

LOLTOM.

@tommeier
Author

Current run for 100 files:

  • Old way (serial): 4 min 50 s
  • Parallel (on an 8-core MacBook Pro): 1 min 22 s

@nullobject

🐍

@minid33

minid33 commented Aug 29, 2014

👍 Good stuff, it's close to what I had made. Looks like you get similar results too.

@michaeleiselsc

parallel:

real	0m46.171s
user	0m35.735s
sys	0m6.102s

serial:

real	0m31.971s
user	0m27.851s
sys	0m2.257s

So I'm not getting great parallelization, and CPU usage seems pretty low as well. I think this approach adds too much overhead for each file it lints.
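
Batching several files into each OCLint invocation is one way to cut that per-file start-up cost. A rough sketch, under two assumptions not confirmed here: that OCLint accepts multiple source paths in a single call (as the unused source_paths string in the script hints), and that batch_size=8 is just an arbitrary illustration:

    def chunked(items, size):
        # Yield successive slices of at most `size` items.
        for i in range(0, len(items), size):
            yield items[i:i + size]

    def build_batched_calls(oclint_bin, extra_args, source_list, batch_size=8):
        # Build one OCLint command per batch so each process start-up is
        # amortised over several files instead of one.
        calls = []
        for group in chunked(source_list, batch_size):
            quoted_paths = ' '.join('"' + path + '"' for path in group)
            calls.append(oclint_bin + extra_args + ' ' + quoted_paths)
        return calls

    # The batched commands can then be fed to the same multiprocessing pool
    # as the per-file commands in the gist.
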
