@vitchyr · Last active June 17, 2019
Layer Norm Implementation in TensorFlow
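Layer normalization (Ba, Kiros & Hinton, 2016) normalizes each example's pre-activations across the feature dimension of a layer, computing a separate mean and variance per example, and then applies a learned per-unit gain and bias. The function below implements this for a 2D tensor of shape (batch_size, layer_size).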
import tensorflow as tf

LAYER_NORM_BIAS_DEFAULT_NAME = "ln_bias"
LAYER_NORM_GAIN_DEFAULT_NAME = "ln_gain"
LAYER_NORMALIZATION_DEFAULT_NAME = "layer_normalization"


def layer_normalize(
        input_pre_nonlinear_activations,
        input_shape,
        epsilon=1e-5,
        name=LAYER_NORMALIZATION_DEFAULT_NAME,
):
    """
    Layer-normalize a 2D tensor along its second axis, which corresponds to
    normalizing within a layer.

    :param input_pre_nonlinear_activations: 2D tensor of pre-nonlinearity
        activations, shape (batch_size, layer_size).
    :param input_shape: Shape of the learned gain and bias variables,
        i.e. (layer_size,).
    :param epsilon: Small constant added for numerical stability. The
        normalized value is
        ```
        norm = (x - mean) / sqrt(variance + epsilon)
        ```
    :param name: Name for the variable scope of this layer.
    :return: Layer-normalized pre-nonlinearity activations.
    """
    # Per-example mean and variance across the layer's units (axis 1).
    mean, variance = tf.nn.moments(input_pre_nonlinear_activations, [1],
                                   keep_dims=True)
    normalised_input = (input_pre_nonlinear_activations - mean) / tf.sqrt(
        variance + epsilon)
    # Learned per-unit gain and bias, initialized to the identity transform.
    with tf.variable_scope(name):
        gains = tf.get_variable(
            LAYER_NORM_GAIN_DEFAULT_NAME,
            input_shape,
            initializer=tf.constant_initializer(1.),
        )
        biases = tf.get_variable(
            LAYER_NORM_BIAS_DEFAULT_NAME,
            input_shape,
            initializer=tf.constant_initializer(0.),
        )
    return normalised_input * gains + biases
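
Below is a minimal usage sketch, assuming TensorFlow 1.x (where tf.placeholder, tf.layers.dense, and tf.Session are available); the batch and layer sizes are hypothetical, chosen only for illustration.

import numpy as np
import tensorflow as tf

batch_size, layer_size = 4, 8

# Hypothetical dense layer whose pre-activations are layer-normalized
# before the nonlinearity is applied.
x = tf.placeholder(tf.float32, shape=(None, layer_size))
pre_activations = tf.layers.dense(x, layer_size, activation=None)
normalized = layer_normalize(pre_activations, (layer_size,))
output = tf.nn.relu(normalized)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    out = sess.run(output, feed_dict={x: np.random.randn(batch_size, layer_size)})
    print(out.shape)  # (4, 8)

Note that each call to layer_normalize with the same name enters the same variable scope, so a second call would raise a variable-already-exists error unless a unique name is passed (or the scope is created with reuse enabled).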