@thearn · Created March 11, 2015 17:48
simple complicated component — an OpenMDAO test assembly that chains SimpleComp instances through several layers and drives them with pyOptSparse/SNOPT.
from openmdao.main.api import Component, Assembly
from openmdao.main.datatypes.api import Float
from pyoptsparse_driver.pyoptsparse_driver import pyOptSparseDriver
from pyopt_driver import pyopt_driver
from simple import SimpleComp, SimpleCompNoDeriv
import time


class parallel(Assembly):

    def configure(self):
        # pyOptSparse/SNOPT driver, using a PETSc KSP linear solver for the
        # coupled derivative system.
        self.add("driver", pyOptSparseDriver())
        self.driver.optimizer = 'SNOPT'
        self.driver.gradient_options.lin_solver = 'petsc_ksp'
        #self.driver.gradient_options.force_fd = True

        # Alternative: the older pyOpt driver.
        # self.add("driver", pyopt_driver.pyOptDriver())
        # self.driver.optimizer = "SNOPT"
        # self.driver.options = {'Major optimality tolerance': 1e-3,
        #                        'Iterations limit': 500000000,
        #                        "New basis file": 10}

        # Layer a: four independent components whose inputs are the design
        # variables.
        for i in xrange(4):
            self.add("a%i" % i, SimpleComp())
            self.driver.workflow.add("a%i" % i)
            self.driver.add_parameter("a%i.x" % i, low=-1, high=1)
            self.driver.add_parameter("a%i.y" % i, low=-1, high=1)
            self.driver.add_parameter("a%i.A" % i, low=-1, high=1)

        # Layer b: two components, each fed by a pair of a-components.
        for i in xrange(2):
            self.add("b%i" % i, SimpleComp())
            self.driver.workflow.add("b%i" % i)

        self.connect("a0.z", "b0.x")
        self.connect("a0.z", "b0.A[0]")
        self.connect("a1.z", "b0.y")
        self.connect("a1.z", "b0.A[1]")

        self.connect("a2.z", "b1.x")
        self.connect("a2.z", "b1.A[0]")
        self.connect("a3.z", "b1.y")
        self.connect("a3.z", "b1.A[1]")

        # Layer c: one component per b-component, with a constraint on each.
        for i in xrange(2):
            self.add("c%i" % i, SimpleComp())
            self.driver.workflow.add("c%i" % i)
            self.connect("b%i.z" % i, "c%i.x" % i)
            self.connect("b%i.z" % i, "c%i.A[5]" % i)
            self.driver.add_constraint("c%i.x < 1" % i)

        # Layer d: four components, all fed by both c-components.
        for i in xrange(4):
            self.add("d%i" % i, SimpleComp())
            self.driver.workflow.add("d%i" % i)
            self.connect("c0.z", "d%i.x" % i)
            self.connect("c0.z", "d%i.A[0]" % i)
            self.connect("c1.z", "d%i.y" % i)
            self.connect("c1.z", "d%i.A[50]" % i)

        #self.driver.add_constraint("d0.y < c0.y")

        # Layer e: one component per d-component; their outputs feed the
        # objective.
        for i in xrange(4):
            self.add("e%i" % i, SimpleComp())
            self.driver.workflow.add("e%i" % i)
            self.connect("d%i.z" % i, "e%i.x" % i)
            self.connect("d%i.z" % i, "e%i.y" % i)
            self.connect("d%i.z" % i, "e%i.A[0]" % i)

        # Objective: log(e0.z + e1.z + e2.z + e3.z) + a0.z
        expr = "log(" + ('+'.join(["e%i.z" % i for i in xrange(4)])) + ") + a0.z"
        self.driver.add_objective(expr)


if __name__ == "__main__":
    # networkx is only needed for the commented-out graph plotting below.
    import networkx as nx

    top = parallel()
    top._setup()

    t = time.time()
    top.driver.gradient_options.derivative_direction = "forward"
    top.run()
    print "total time", time.time() - t

    # from openmdao.util.dotgraph import plot_system_tree
    # plot_system_tree(top._system)
    # print "time:", time.time() - t
    # print top._pseudo_0.out0

    # graph = top._depgraph.component_graph()
    # #graph = assembly.driver.workflow._derivative_graph.component_graph()
    # defaults = ["derivative_exec_count", "directory", "itername", "exec_count",
    #             "force_execute", "driver"]
    # remove = []
    # for node in graph.nodes_iter():
    #     for d in defaults:
    #         if d in node:
    #             remove.append(node)
    #             break
    # for node in remove:
    #     graph.remove_node(node)
    # ag = nx.to_agraph(graph)
    # ag.layout("dot")
    # ag.draw('design.pdf')
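As a rough sanity check of the wiring above, the same data flow can be traced in plain NumPy without OpenMDAO or SNOPT installed. The sketch below mirrors SimpleComp.execute() and the connect() calls; the helper names (simple, sparse_A, evaluate) are illustrative assumptions, not part of the gist, and the defaults match the component traits (x=pi, y=2, A=zeros(75)).

import numpy as np

def simple(x=np.pi, y=2.0, A=None):
    # Mirrors SimpleComp.execute(): z = x**2 * y + sum(A - 3)
    if A is None:
        A = np.zeros(75)
    return x**2 * y + np.sum(A - 3.0)

def sparse_A(entries):
    # Build a 75-element A with only a few indices set, as the connections
    # into A[0], A[1], A[5], A[50] do in the assembly.
    A = np.zeros(75)
    for idx, val in entries.items():
        A[idx] = val
    return A

def evaluate(a_inputs):
    # a_inputs: four (x, y, A) tuples, i.e. the driver's design variables.
    a = [simple(*inp) for inp in a_inputs]
    b = [simple(a[0], a[1], sparse_A({0: a[0], 1: a[1]})),
         simple(a[2], a[3], sparse_A({0: a[2], 1: a[3]}))]
    c = [simple(bi, A=sparse_A({5: bi})) for bi in b]
    d = [simple(c[0], c[1], sparse_A({0: c[0], 50: c[1]})) for _ in xrange(4)]
    e = [simple(di, di, sparse_A({0: di})) for di in d]
    return np.log(sum(e)) + a[0]  # the driver objective expression

if __name__ == "__main__":
    print evaluate([(np.pi, 2.0, np.zeros(75))] * 4)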
# simple.py — component definitions imported by the assembly above.
from openmdao.main.api import Component, Assembly
from openmdao.main.datatypes.api import Float, Array

import numpy as np


class SimpleCompNoDeriv(Component):
    """Same response as SimpleComp, but without analytic derivatives."""

    x = Float(np.pi, iotype="in")
    y = Float(2., iotype="in")
    A = Array(np.zeros(75, dtype=np.float), iotype="in")
    z = Float(3., iotype="out")

    def execute(self):
        # The loop just makes this component artificially expensive; every
        # pass computes the same value.
        for i in xrange(10000):
            self.z = self.x**2 * self.y + sum(self.A - 3.)


class SimpleComp(Component):
    x = Float(np.pi, iotype="in")
    y = Float(2., iotype="in")
    A = Array(np.zeros(75, dtype=np.float), iotype="in")
    z = Float(3., iotype="out")

    def execute(self):
        self.z = self.x**2 * self.y + sum(self.A - 3.)

    def list_deriv_vars(self):
        return ("x", "y", "A",), ("z",)

    def provideJ(self):
        # Partials of z = x**2 * y + sum(A - 3):
        #   dz/dx = 2*x*y, dz/dy = x**2, dz/dA = ones
        self.j1 = 2*self.x*self.y
        self.j2 = self.x**2

    def apply_deriv(self, arg, result):
        # Forward (Jacobian-vector) product.
        if "z" in result:
            if "x" in arg:
                result["z"] += arg["x"]*self.j1
            if "y" in arg:
                result["z"] += arg["y"]*self.j2
            if "A" in arg:
                result["z"] += arg["A"].sum()

    def apply_derivT(self, arg, result):
        # Adjoint (Jacobian-transpose-vector) product.
        if "z" in arg:
            if "x" in result:
                result["x"] += arg["z"]*self.j1
            if "y" in result:
                result["y"] += arg["z"]*self.j2
            if "A" in result:
                result["A"] += arg["z"]


if __name__ == "__main__":
    s = SimpleComp()
    s.check_gradient(mode="adjoint")
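Since SimpleComp supplies analytic derivatives through provideJ/apply_deriv/apply_derivT, the check_gradient() call above compares them against finite differences inside OpenMDAO. As a standalone cross-check, a minimal sketch assuming nothing beyond NumPy can verify the same partials directly: dz/dx = 2*x*y, dz/dy = x**2, and dz/dA = ones. The function and names below (f, fd_check) are illustrative, not part of the gist.

import numpy as np

def f(x, y, A):
    # Same response as SimpleComp.execute()
    return x**2 * y + np.sum(A - 3.0)

def fd_check():
    x, y, A = np.pi, 2.0, np.zeros(75)
    h = 1e-6
    # Central differences for the scalar inputs and one array entry.
    fd_x = (f(x + h, y, A) - f(x - h, y, A)) / (2 * h)
    fd_y = (f(x, y + h, A) - f(x, y - h, A)) / (2 * h)
    e0 = np.zeros(75)
    e0[0] = h
    fd_A0 = (f(x, y, A + e0) - f(x, y, A - e0)) / (2 * h)
    print "dz/dx    fd:", fd_x, " analytic:", 2 * x * y
    print "dz/dy    fd:", fd_y, " analytic:", x**2
    print "dz/dA[0] fd:", fd_A0, " analytic:", 1.0

if __name__ == "__main__":
    fd_check()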