bbrelje / parallel_coloring.py
Created May 8, 2020 13:26
Parallel coloring duplicate calls to matrix-free API
import openmdao.api as om
import numpy as np
import time
from mpi4py import MPI

class SumComp(om.ExplicitComponent):
    def __init__(self, size):
        super(SumComp, self).__init__()
        self.size = size
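    # The gist preview is truncated here. A hedged sketch of how the
    # component might continue; the bodies below are assumptions, not the
    # gist's actual code.
    def setup(self):
        self.add_input('x', shape=self.size)
        self.add_output('y', shape=1)

    def compute(self, inputs, outputs):
        outputs['y'] = np.sum(inputs['x'])

    # matrix-free derivative API; the reported bug is that parallel
    # coloring triggers duplicate calls to this method
    def compute_jacvec_product(self, inputs, d_inputs, d_outputs, mode):
        if mode == 'fwd':
            if 'x' in d_inputs and 'y' in d_outputs:
                d_outputs['y'] += np.sum(d_inputs['x'])
        else:
            if 'x' in d_inputs and 'y' in d_outputs:
                d_inputs['x'] += d_outputs['y']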
bbrelje / pargroup_output_as_objective.py
Created May 8, 2020 02:36
Check_totals fails when objective is an output of a parallel component
import numpy as np
import openmdao.api as om
import time

class DelayComp(om.ExplicitComponent):
    def initialize(self):
        self.options.declare('time', default=3.0)
        self.options.declare('size', default=1)
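    # Truncated preview. A hedged sketch of the rest of the component;
    # the bodies below are assumptions, not the gist's actual code.
    def setup(self):
        size = self.options['size']
        self.add_input('x', shape=size)
        self.add_output('y', shape=1)
        self.declare_partials('y', 'x')

    def compute(self, inputs, outputs):
        time.sleep(self.options['time'])  # stand-in for an expensive analysis
        outputs['y'] = np.sum(inputs['x'])

    def compute_partials(self, inputs, partials):
        partials['y', 'x'] = np.ones((1, self.options['size']))

# Per the title, putting DelayComp instances inside a ParallelGroup and
# calling prob.check_totals() with the parallel output as the objective
# is what fails.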
bbrelje / get_val_distributed.py
Created April 22, 2020 21:47
get_val on distributed output - incorrect behavior
import openmdao.api as om
import numpy as np
from openmdao.utils.array_utils import evenly_distrib_idxs
import mpi4py.MPI as MPI
N = 3
class DistribComp(om.ExplicitComponent):
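    # Truncated preview. A hedged sketch of the distributed component and
    # the get_val call the title says misbehaves; bodies are assumptions.
    def initialize(self):
        self.options['distributed'] = True

    def setup(self):
        comm = self.comm
        # split the N entries of the output across the ranks
        sizes, offsets = evenly_distrib_idxs(comm.size, N)
        self.add_input('x', shape=1)
        self.add_output('y', shape=sizes[comm.rank])

    def compute(self, inputs, outputs):
        outputs['y'][:] = inputs['x']

if __name__ == '__main__':
    prob = om.Problem()
    prob.model.add_subsystem('comp', DistribComp(), promotes=['*'])
    prob.setup()
    prob.run_model()
    # reported behavior: this returns only the local slice of the
    # distributed output rather than the full array
    print(prob.get_val('y'))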
bbrelje / empty_rowcol.py
Last active May 6, 2020 20:32
Can't declare empty rows/cols in declare_partials
import openmdao.api as om
import numpy as np
from openmdao.utils.array_utils import evenly_distrib_idxs
import mpi4py.MPI as MPI
N = 3
class DistribComp(om.ExplicitComponent):
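    # Truncated preview. A hedged sketch of the declaration the title says
    # is rejected; bodies are assumptions, not the gist's actual code.
    def initialize(self):
        self.options['distributed'] = True

    def setup(self):
        sizes, offsets = evenly_distrib_idxs(self.comm.size, N)
        nlocal = sizes[self.comm.rank]
        self.add_input('x', shape=N)
        self.add_output('y', shape=nlocal)
        # with more ranks than rows, nlocal == 0 on some ranks and these
        # index arrays come out empty, which declare_partials rejects
        rows = np.arange(nlocal)
        cols = offsets[self.comm.rank] + rows
        self.declare_partials('y', 'x', rows=rows, cols=cols)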
bbrelje / src_indices_oob.py
Created April 19, 2020 21:58
out of bounds src_indices not caught when 1D list of indices used
import openmdao.api as om
import numpy as np
import mpi4py.MPI as MPI
# The system will catch that 21 is out of bounds if specified as a list of tuples
# SRC_INDICES = [(1,),(21,)]
# The system will catch that 21 is out of bounds if it is first in the flat list
# It interprets this the same way as a list of 1D tuples
# SRC_INDICES = [21, 1]
# The system will NOT catch that 21 is out of bounds if it is not first in the flat list
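# A hedged sketch of the rest of the reproduction; the component and
# connection details below are assumptions, not the gist's actual code.
SRC_INDICES = [1, 21]

prob = om.Problem()
model = prob.model
model.add_subsystem('ivc', om.IndepVarComp('x', np.zeros(5)))
model.add_subsystem('comp', om.ExecComp('y = 2.0*x',
                                        x=np.zeros(2), y=np.zeros(2)))
# 21 is out of bounds for the length-5 source, but no error is raised
# because it is not the first entry in the flat list
model.connect('ivc.x', 'comp.x', src_indices=SRC_INDICES)
prob.setup()
prob.run_model()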
bbrelje / src_indices_nondist_comp.py
Created April 19, 2020 20:23
Adding a distributed component in the group forces the correct behavior
import openmdao.api as om
import numpy as np
import mpi4py.MPI as MPI
SRC_INDICES = [1,2]
FORCE_PETSCTRANSFER = False

class TestCompDist(om.ExplicitComponent):
    def initialize(self):
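        # Truncated preview. A hedged sketch of how the component likely
        # continues; bodies are assumptions, not the gist's actual code.
        self.options['distributed'] = True

    def setup(self):
        self.add_input('x', shape=2)
        self.add_output('y', shape=2)

    def compute(self, inputs, outputs):
        outputs['y'] = 2.0 * inputs['x']

# Per the title, adding this distributed component to the group (or forcing
# the PETSc transfer via FORCE_PETSCTRANSFER) makes the SRC_INDICES
# connection behave correctly.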
bbrelje / illegal_remote_connection.py
Last active April 19, 2020 19:12
Local transfer with out of bounds src_index
import openmdao.api as om
import mpi4py.MPI as MPI

SRC_INDICES = 1

class TestComp(om.ExplicitComponent):
    def initialize(self):
        self.options['distributed'] = False

    def setup(self):
        self.add_input('x', shape=1)
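        # Truncated preview. A hedged sketch of the rest; bodies and the
        # driver script below are assumptions, not the gist's actual code.
        self.add_output('y', shape=1)

    def compute(self, inputs, outputs):
        outputs['y'] = 2.0 * inputs['x']

prob = om.Problem()
model = prob.model
model.add_subsystem('ivc', om.IndepVarComp('x', 1.0))
model.add_subsystem('comp', TestComp())
# SRC_INDICES = 1 is out of bounds for the scalar source 'ivc.x', yet the
# purely local transfer reportedly completes without raising an error
model.connect('ivc.x', 'comp.x', src_indices=[SRC_INDICES])
prob.setup()
prob.run_model()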
bbrelje / distrib_jacobian.py
Last active April 19, 2020 11:13
Defining distributed partials element-by-element doesn't work
import openmdao.api as om
import numpy as np
from openmdao.utils.array_utils import evenly_distrib_idxs
N = 3

class DistribCompNoWork(om.ExplicitComponent):
    def initialize(self):
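        # Truncated preview. A hedged sketch of the element-by-element
        # partials pattern the title describes; bodies are assumptions.
        self.options['distributed'] = True

    def setup(self):
        sizes, offsets = evenly_distrib_idxs(self.comm.size, N)
        self.nlocal = sizes[self.comm.rank]
        self.offset = offsets[self.comm.rank]
        self.add_input('x', shape=N)
        self.add_output('y', shape=self.nlocal)
        rows = np.arange(self.nlocal)
        self.declare_partials('y', 'x', rows=rows, cols=self.offset + rows)

    def compute(self, inputs, outputs):
        outputs['y'] = 2.0 * inputs['x'][self.offset:self.offset + self.nlocal]

    def compute_partials(self, inputs, partials):
        # filling the declared nonzeros one element at a time; per the
        # title, this does not work for a distributed component
        for i in range(self.nlocal):
            partials['y', 'x'][i] = 2.0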
import openmdao.api as om
prob = om.Problem()
model = prob.model
model.add_subsystem('p1', om.IndepVarComp('x', 1.0))
model.add_subsystem('p2', om.IndepVarComp('x', 1.0))
parallel = model.add_subsystem('parallel', om.ParallelGroup())
parallel.add_subsystem('c1', om.ExecComp(['y=-2.0*x']))
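# The listing is cut off here. A hedged completion of this ParallelGroup
# snippet, following the OpenMDAO documentation example it resembles:
parallel.add_subsystem('c2', om.ExecComp(['y=5.0*x']))
model.add_subsystem('c3', om.ExecComp(['y=3.0*x1+7.0*x2']))
model.connect('p1.x', 'parallel.c1.x')
model.connect('p2.x', 'parallel.c2.x')
model.connect('parallel.c1.y', 'c3.x1')
model.connect('parallel.c2.y', 'c3.x2')
prob.setup()
prob.run_model()
print(prob.get_val('c3.y'))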