Skip to content

Instantly share code, notes, and snippets.

@bbrelje
Created June 23, 2020 13:59
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save bbrelje/62f32e38f9654a2312689a93497f8fcb to your computer and use it in GitHub Desktop.
import openmdao.api as om
import numpy as np
import mpi4py.MPI as MPI
# RUN THIS WITH 2 or more PROCS UNDER MPI
# Indices each process reads from the connected source vector via src_indices.
SRC_INDICES = [1,2]
# When True, a distributed component (c4) is added to the model, which forces
# OpenMDAO to use PETScTransfer instead of the default (local-only) transfer.
FORCE_PETSCTRANSFER = True
# this should raise an error when COMP_DISTRIBUTED is false
# this should work when COMP_DISTRIBUTED is true
COMP_DISTRIBUTED = True
class TestCompDist(om.ExplicitComponent):
    """Distributed component whose presence forces PETScTransfer in the model."""

    def initialize(self):
        # Flag this component as distributed across the MPI communicator.
        self.options['distributed'] = True

    def setup(self):
        self.add_input('x', shape=2)
        self.add_output('y', shape=1)
        # Constant unit partials; y = sum(x) so dy/dx is all ones.
        self.declare_partials('y', 'x', val=1.0)

    def compute(self, inputs, outputs):
        # Output is the sum of both entries of the input vector.
        outputs['y'] = np.sum(inputs['x'])
class TestComp(om.ExplicitComponent):
    """Component that reads its input through src_indices.

    Whether it is distributed is controlled by the module-level
    COMP_DISTRIBUTED switch.
    """

    def initialize(self):
        self.options['distributed'] = COMP_DISTRIBUTED

    def setup(self):
        # read SRC_INDICES on each proc; sentinel default value makes a
        # missed transfer easy to spot in the output.
        self.add_input('x', shape=2, src_indices=SRC_INDICES, val=-2038.0)
        self.add_output('y', shape=1)
        self.declare_partials('y', 'x')

    def compute(self, inputs, outputs):
        outputs['y'] = np.sum(inputs['x'])

    def compute_partials(self, inputs, J):
        # y = sum(x), so the Jacobian row is all ones.
        J['y', 'x'] = np.ones((2,))
# Build the model, run it, and report the result on every rank.
prob = om.Problem()
model = prob.model

# nobody should ever do this but... give each rank a different value for the
# independent variable so cross-process transfer behavior becomes visible.
if MPI.COMM_WORLD.rank == 0:
    setval = np.array([2.0, 3.0])
else:
    setval = np.array([10.0, 20.0])

# no parallel or distributed comps, so default_vector is used (local xfer only)
model.add_subsystem('p1', om.IndepVarComp('x', setval))
model.add_subsystem('c3', TestComp())
if FORCE_PETSCTRANSFER:
    # Adding a distributed component makes OpenMDAO pick PETScTransfer.
    model.add_subsystem('c4', TestCompDist())
model.connect("p1.x", "c3.x")

prob.setup(check=False, mode='fwd')
prob.run_model()
print(f"rank: {MPI.COMM_WORLD.rank} val: {prob['c3.y']} should be 13")
# list_outputs only shows the value on the first proc (0th)
prob.model.list_outputs()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment