Created
July 26, 2018 19:59
-
-
Save davidhughhenrymack/8a4e7e9164f574a0f90716852b746450 to your computer and use it in GitHub Desktop.
A little idea to test out
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import tensorflow as tf | |
''' | |
The mini-inception (mi) library | |
This is inspired by Google's inception network | |
and DARTS architecture search. I didn't get fancy | |
on the bilevel optimization, so let's see how it goes!! | |
''' | |
def mi_activation(tensor):
    """Softmax-weighted mixture of candidate activation functions.

    A learnable 4-way logit vector is softmaxed into mixture weights, and
    the output is the weighted sum of tanh, sigmoid, relu, and identity
    applied to `tensor` — a DARTS-style continuous relaxation of the
    discrete "pick one activation" choice.

    Args:
        tensor: input tensor of any shape.

    Returns:
        A tensor with the same shape as `tensor`.
    """
    with tf.name_scope("mi_activation"):
        activations = [tf.tanh, tf.nn.sigmoid, tf.nn.relu, tf.identity]
        # BUG FIX: tf.variable does not exist; use tf.get_variable so the
        # choice logits become a trainable variable (TF1 style, matching
        # tf.layers.dense used elsewhere in this file).
        choice = tf.get_variable("activation_choice", [4])
        choice = tf.nn.softmax(choice)
        # BUG FIX: zipping directly over a graph Tensor raises in TF1
        # graph mode; tf.unstack yields the four scalar weights.
        weighted = [
            fn(tensor) * w for fn, w in zip(activations, tf.unstack(choice))
        ]
        # BUG FIX: tf.reduce_sum over the list collapsed the result to a
        # scalar; tf.add_n sums element-wise and preserves tensor shape.
        return tf.add_n(weighted)
def mi_residual(tensor, width):
    """Learnable residual block: softmax-gated blend of a two-dense-layer
    transform and the identity skip connection.

    A trainable 2-way gate decides how much of the transformed path vs.
    the untouched input flows through; the blend is then activated.

    Args:
        tensor: input tensor. NOTE(review): the skip-add requires its
            last dimension to equal `width` — confirm at call sites.
        width: units for both dense layers.

    Returns:
        The gated, activated residual output.
    """
    with tf.name_scope("mi_residual"):
        # BUG FIX: tf.variable does not exist; tf.get_variable creates
        # the trainable 2-way gate logits.
        choice = tf.nn.softmax(tf.get_variable("choice", [2]))
        transformed = tf.layers.dense(
            mi_activation(tf.layers.dense(tensor, width)),
            width,
        )
        left = choice[0] * transformed
        right = choice[1] * tensor
        join = left + right
        # BUG FIX: the original computed mi_activation(join) but returned
        # the pre-activation `join`, leaving `out` dead. Return the
        # activated output as clearly intended.
        return mi_activation(join)
def mi_deep(tensor, width, depth):
    """Build a deep stack of mini-inception layers.

    Stacks depth // 2 mi_residual blocks (each contributes two dense
    layers), then — when depth is odd — appends one extra dense layer
    followed by mi_activation to reach the requested depth.

    Args:
        tensor: input tensor.
        width: units per dense layer.
        depth: total number of dense layers in the stack.

    Returns:
        The output tensor of the final layer.
    """
    with tf.name_scope("mi_deep"):
        out = tensor
        num_residual_blocks = depth // 2
        for _ in range(num_residual_blocks):
            out = mi_residual(out, width)
        if depth % 2:
            out = mi_activation(tf.layers.dense(out, width))
        return out
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment