example parallel parameter sweep for an oomph-lib driver
#!/usr/bin/env python

import subprocess as subp
from multiprocessing import Pool
import itertools
import sys
import numbers
import argparse

program = './spatially_constant_m_length_variations'


def dirname_from_parameters(parameters):
    """Convert a list of parameters to a bash-safe(ish) directory name."""
    base = "loop_results/sp"
    for p in parameters:
        base += "_" + str(p)

    # Replace characters that could go wrong in bash/c, probably lots
    # more needed here...
    return base.replace('-', 'm')

def myrun(parameter_list_as_strings):
    """Make results directory, run the program with the given parameters
    and store the trace.
    """
    outdir = dirname_from_parameters(parameter_list_as_strings)
    subp.call(['mkdir', '-p', outdir])

    tracefile = open(outdir + "/trace", 'w')

    # Write parameter list to file
    paramfile = open(outdir + '/parameters', 'w')
    for p in parameter_list_as_strings:
        paramfile.write(p + " ")
    paramfile.write('\n')
    paramfile.close()

    [eps, dt, mconstraintmethod, damping, hk, tmax] = parameter_list_as_strings

    print "Running", eps, dt, mconstraintmethod, damping, hk, tmax

    subp.call([program,
               '-eps', eps,
               '-mconstraintmethod', mconstraintmethod,
               '-damp', damping,
               '-hk', hk,
               '-outdir', outdir,
               '-dt', dt,
               '-tmax', tmax],
              stdout=tracefile)

    tracefile.close()
    return

def main():
    parser = argparse.ArgumentParser(
        description='Run parameter sweeps of the oomph-lib driver in parallel.')
    parser.add_argument('--mass-loop', '-m', action="store_true", default=False,
                        help='Run a loop over many inputs for a short tmax.')
    parser.add_argument('--long-loop', '-l', action="store_true", default=False,
                        help='Run a loop over fewer inputs for a longer tmax.')
    args = parser.parse_args()

    # Call make, just in case
    subp.call(["make"])

    # Dispatch
    if args.mass_loop:
        mass_loop()
    if args.long_loop:
        long_loop()

    return 0  # Exit successfully

def mass_loop():
    """Sweep a large set of parameter combinations with a short tmax."""
    # Input lists of parameters to be used
    epsilons = [0]
    dts = [0.1, 0.05, 0.01, 0.005, 0.001, 0.0005, 0.0001]
    mconstraintmethods = [0, 1, 2]
    dampings = [1.0, 0.5, 0.1, 0.05, 0.01, 0.005]
    hks = [0.0]
    tmax = [1.001]

    arguments = [epsilons, dts, mconstraintmethods, dampings, hks, tmax]
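    # 1 * 7 * 3 * 6 * 1 * 1 = 126 parameter combinations in total.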
    parallel_run_all(arguments)
    return


def long_loop():
    """Sweep a smaller set of parameter combinations with a longer tmax."""
    epsilons = [0]
    dts = [0.1, 0.05, 0.01, 0.005]
    mconstraintmethods = [0, 1, 2]
    dampings = [0.1]
    hks = [0.0]
    tmax = [-1]

    arguments = [epsilons, dts, mconstraintmethods, dampings, hks, tmax]
    parallel_run_all(arguments)
    return

def parallel_run_all(arglist):
    """Takes a list of lists of arguments, constructs all combinations of
    the arguments then runs them all (on 7 cores).
    """
    # Convert to a list of lists of strings ready for using call and
    # map_async.
    args_as_strings = map(lambda x: map(str, x), arglist)

    # Create a list (well, actually an iterator but it doesn't matter) of
    # all possible combinations of arguments (i.e. equivalent to nesting
    # for loops over all argument lists).
    it = itertools.product(*args_as_strings)
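    # e.g. itertools.product(['0'], ['0.1', '0.05'], ['1']) yields
    # ('0', '0.1', '1') and then ('0', '0.05', '1').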
    # Start worker processes then run function "myrun" using them
    pool = Pool(processes=7)
    pool.map_async(myrun, it)  # Run all instances
    pool.close()               # Tell python there are no more jobs coming
    pool.join()                # Wait for everything to finish
    return


if __name__ == "__main__":
    sys.exit(main())
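
To run a sweep (a sketch, assuming the script is saved as, say, parameter_sweep.py — the filename is illustrative — in the directory whose Makefile builds spatially_constant_m_length_variations):

./parameter_sweep.py --mass-loop
./parameter_sweep.py --long-loop

Each parameter combination gets its own subdirectory under loop_results/ containing a "parameters" file and the driver's stdout in "trace".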