Skip to content

Instantly share code, notes, and snippets.

@thearn
Created January 24, 2015 16:19
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save thearn/15281add859979834d62 to your computer and use it in GitHub Desktop.
KS Component
import numpy as np
from openmdao.main.api import Component, Assembly, set_as_top
from openmdao.lib.datatypes.api import Float, Array
class KSComponent(Component):
    """
    Aggregates a number of functions to a single value via the
    Kreisselmeier-Steinhauser Function. Often used in the aggregation of
    optimization constraints.

    The input numerical array 'g' can be of any dimension and shape.
    Initialization argument 'shape' can be either a positive integer (if the
    two arrays to be compared are 1D), or a tuple of positive integers (to
    specify a higher dimensional shape).
    """

    # Sharpness hyperparameter: larger rho pushes KS closer to max(g).
    rho = Float(50.0,
                iotype="in",
                desc="Hyperparameter for the KS function")

    KS = Float(0,
               iotype="out",
               desc="Value of the aggregate KS function")

    def __init__(self, shape):
        """Add the input array 'g' with a per-instance shape.

        shape: positive int (1D) or tuple of positive ints.
        """
        super(KSComponent, self).__init__()
        # FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin float is the documented drop-in replacement.
        self.add('g', Array(np.zeros(shape),
                            dtype=float,
                            iotype="in",
                            desc="Array of function values to be aggregated"))

    def execute(self):
        """Compute KS(g) = max(g) + (1/rho) * log(sum(exp(rho*(g - max(g)))))."""
        g_max = np.max(self.g)
        # Shift by the max so exp() cannot overflow (standard log-sum-exp trick).
        g_diff = self.g - g_max
        # Cache intermediates for reuse in provideJ().
        self.exponents = np.exp(self.rho * g_diff)
        self.summation = np.sum(self.exponents)
        self.KS = g_max + 1.0 / self.rho * np.log(self.summation)

    def list_deriv_vars(self):
        """Declare differentiable inputs and outputs for the framework."""
        return ("g",), ("KS",)

    def provideJ(self):
        """
        Jacobian may be dense, but it's easier to write apply_deriv and
        apply_derivT when input matrix dimension/shape is arbitrary than to
        always return a 2D Jacobian of partial derivatives.

        Uses the intermediates cached by execute(), so execute() must have
        run first (the framework guarantees this ordering).
        """
        dsum_dg = self.rho * self.exponents
        dKS_dsum = 1.0 / self.rho / self.summation
        # dKS/dg has the same shape as g: softmax-like weights.
        self.J = dKS_dsum * dsum_dg

    def apply_deriv(self, arg, result):
        """Forward mode: d(KS) += sum(J * d(g))."""
        if 'g' in arg and 'KS' in result:
            result['KS'] += np.sum(arg['g'] * self.J)

    def apply_derivT(self, arg, result):
        """Adjoint mode: d(g) += J * d(KS)."""
        if 'g' in result and 'KS' in arg:
            result['g'] += arg['KS'] * self.J
if __name__ == "__main__":
    # Smoke test: aggregate a random 3x3 array and check the analytic
    # derivatives against finite differences.
    rows, cols = 3, 3
    ks_comp = KSComponent((rows, cols))
    ks_comp.rho = 1
    ks_comp.g = np.random.randn(rows, cols)
    ks_comp.run()
    ks_comp.check_gradient(mode="forward")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment