@davidhughhenrymack
Created July 26, 2018 19:59
A little idea to test out
import tensorflow as tf
'''
The mini-inception (mi) library
This is inspired by Google's inception network
and DARTS architecture search. I didn't get fancy
on the bilevel optimization, so let's see how it goes!!
'''
def mi_activation(tensor):
    with tf.variable_scope(None, default_name="mi_activation"):
        activations = [
            tf.tanh, tf.nn.sigmoid, tf.nn.relu, tf.identity
        ]
        # Learnable mixing weights, one per candidate activation
        choice = tf.get_variable("activation_choice", [len(activations)])
        choice = tf.nn.softmax(choice)
        # Element-wise weighted sum of the candidate activations
        weighted = [act(tensor) * choice[i] for i, act in enumerate(activations)]
        return tf.add_n(weighted)
def mi_residual(tensor, width):
    with tf.variable_scope(None, default_name="mi_residual"):
        # Learnable gate between the transformed branch and the skip branch
        choice = tf.get_variable("choice", [2])
        choice = tf.nn.softmax(choice)
        left = choice[0] * tf.layers.dense(
            mi_activation(
                tf.layers.dense(tensor, width)
            ),
            width)
        right = choice[1] * tensor
        join = left + right
        return mi_activation(join)
def mi_deep(tensor, width, depth):
    with tf.name_scope("mi_deep"):
        t = tensor
        # Stack residual blocks, two dense layers at a time
        for i in range(depth // 2):
            t = mi_residual(t, width)
        # If depth is odd, finish with a single dense layer
        for i in range(depth % 2):
            t = tf.layers.dense(t, width)
            t = mi_activation(t)
        return t
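
A minimal usage sketch (not part of the original gist): the placeholder shapes, class count, and optimizer below are assumptions chosen only to show how mi_deep slots into a TF1 graph, with the architecture-choice variables trained jointly with the layer weights rather than via bilevel optimization.

# Hypothetical example: wire mi_deep into a small classifier graph (TF1 style).
x = tf.placeholder(tf.float32, [None, 64])       # assumed input size
labels = tf.placeholder(tf.int32, [None])

hidden = tf.layers.dense(x, 32)                  # project to the block width
hidden = mi_deep(hidden, width=32, depth=4)      # two mi_residual blocks
logits = tf.layers.dense(hidden, 10)             # assumed 10 classes

loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits))
# Architecture weights and layer weights are optimized together here.
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)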