@bbrelje
Created April 19, 2020 20:23
Adding a distributed component in the group forces the correct behavior
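
The script below sets a different value for the IndepVarComp output p1.x on each rank (which nobody should ever do, but it isolates the issue) and connects it to a serial component c3 with src_indices = [1, 2]. With only serial components in the model, OpenMDAO falls back to the default, local-only transfer; flipping FORCE_PETSCTRANSFER to True adds an otherwise-unconnected distributed component, which forces the PETSc transfer and, per the title, the correct behavior (the print statement reports the value that should be 13). To reproduce, run it under two MPI processes, e.g. with something like mpirun -np 2 python followed by whatever file name you save the gist as.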
import openmdao.api as om
import numpy as np
import mpi4py.MPI as MPI

SRC_INDICES = [1, 2]
FORCE_PETSCTRANSFER = False


class TestCompDist(om.ExplicitComponent):
    def initialize(self):
        self.options['distributed'] = True

    def setup(self):
        # src_indices = 1 will not raise an error, but reads a value of 1.0 on every proc
        self.add_input('x', shape=2)
        self.add_output('y', shape=1)
        self.declare_partials('y', 'x', val=1.0)

    def compute(self, inputs, outputs):
        outputs['y'] = np.sum(inputs['x'])


class TestComp(om.ExplicitComponent):
    def initialize(self):
        self.options['distributed'] = False

    def setup(self):
        # src_indices = 1 will not raise an error, but reads a value of 1.0 on every proc
        self.add_input('x', shape=2, src_indices=SRC_INDICES, val=-2038.0)
        self.add_output('y', shape=1)
        self.declare_partials('y', 'x')

    def compute(self, inputs, outputs):
        outputs['y'] = np.sum(inputs['x'])

    def compute_partials(self, inputs, J):
        J['y', 'x'] = np.ones((2,))


prob = om.Problem()
model = prob.model

# nobody should ever do this but...
if MPI.COMM_WORLD.rank == 0:
    setval = np.array([2.0, 3.0])
else:
    setval = np.array([10.0, 20.0])

# no parallel or distributed comps, so default_vector is used (local xfer only)
model.add_subsystem('p1', om.IndepVarComp('x', setval))
model.add_subsystem('c3', TestComp())
if FORCE_PETSCTRANSFER:
    model.add_subsystem('c4', TestCompDist())

model.connect("p1.x", "c3.x")

prob.setup(check=False, mode='fwd')
prob.run_model()
print('rank: ' + str(MPI.COMM_WORLD.rank) + ' val: ' + str(prob['c3.y']) + ' should be 13')
# list_outputs only shows the value on the first proc (0th)
prob.model.list_outputs()
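
For reference, here is a minimal sketch (plain numpy, no OpenMDAO or MPI) of where the expected value of 13 comes from, assuming src_indices resolve against the two ranks' p1.x values laid out in rank order; the variable names are illustrative only, not anything from the OpenMDAO API.

import numpy as np

# Per-rank values assigned to p1.x in the script above.
rank0_vals = np.array([2.0, 3.0])    # set on rank 0
rank1_vals = np.array([10.0, 20.0])  # set on rank 1

# Assumption: src_indices = [1, 2] index into the rank-ordered concatenation,
# picking 3.0 (second entry on rank 0) and 10.0 (first entry on rank 1).
global_x = np.concatenate([rank0_vals, rank1_vals])  # [ 2.  3. 10. 20.]
picked = global_x[[1, 2]]                            # [ 3. 10.]
print(picked.sum())                                  # 13.0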