Skip to content

Instantly share code, notes, and snippets.

@groverpr
Created March 22, 2020 01:52
Show Gist options
  • Save groverpr/30f6e0c56d59baf4c0524c39933141da to your computer and use it in GitHub Desktop.
class CustomSeqNet(gluon.nn.HybridBlock):
    """
    Custom network for sequence data used to predict a binary label.

    Architecture: token embedding -> bidirectional GRU -> stack of
    (Dense -> BatchNorm -> Activation -> Dropout) layers -> single-unit
    output (one logit per example).
    """

    def __init__(self, input_output_embed_map, dense_sizes=None,
                 dropouts=None, activation="relu"):
        """
        Parameters
        ----------
        input_output_embed_map : dict
            {"token_embed": (max_tok_idx, tok_embed_dim),
             "hidden_embed": (_, hidden_embed_dim)}
        dense_sizes : list of int, optional
            Output dimension of each dense layer. Defaults to [100].
        dropouts : list of float, optional
            Dropout rate after each dense layer. Defaults to [0.2].
            Must be the same length as ``dense_sizes``.
        activation : str
            Activation applied after each dense layer (e.g. "relu").
        """
        # FIX: use None sentinels instead of mutable default arguments,
        # which would be shared across every instance of the class.
        self.dense_sizes = [100] if dense_sizes is None else list(dense_sizes)
        self.dropouts = [0.2] if dropouts is None else list(dropouts)
        # FIX: fail fast on mismatched config instead of an IndexError
        # deep inside the layer-construction loop.
        if len(self.dense_sizes) != len(self.dropouts):
            raise ValueError(
                "dense_sizes and dropouts must have the same length")
        super(CustomSeqNet, self).__init__(prefix='CustomSeqNet_')
        with self.name_scope():  # name space object to manage parameter names
            # 1. Embedding layer
            # output = (bs, sequence_len, embed_dim) = (N, T, C)
            self.embed = gluon.nn.Embedding(
                input_dim=input_output_embed_map["token_embed"][0],
                output_dim=input_output_embed_map["token_embed"][1],
                prefix='token_embed_'
            )
            # 2. GRU layer (bidirectional, so features come out doubled)
            self.rnn = gluon.rnn.GRU(
                hidden_size=input_output_embed_map["hidden_embed"][1],
                bidirectional=True,
                layout='NTC',  # batch, sequence length, feature dims
                prefix='review_gru_'
            )
            # 3. Dense layers: Dense -> BatchNorm -> Activation -> Dropout
            # per stage, registered dynamically so the depth is configurable.
            for i, sz in enumerate(self.dense_sizes):
                setattr(self, "dense_{}".format(i), gluon.nn.Dense(sz))
                setattr(self, "bn_dense_{}".format(i),
                        gluon.nn.BatchNorm(axis=1))
                setattr(self, "activation_dense_{}".format(i),
                        gluon.nn.Activation(activation))
                setattr(self, "drop_dense_{}".format(i),
                        gluon.nn.Dropout(self.dropouts[i]))
            # 4. Output layer: one logit for the binary label
            self.output = gluon.nn.Dense(1, prefix="output_")

    def hybrid_forward(self, F, review, review_len):
        """
        Forward pass.

        Parameters
        ----------
        review : NDArray/Symbol of token indices, shape (N, T)
        review_len : NDArray/Symbol of true (unpadded) sequence lengths, (N,)

        Returns
        -------
        One logit per example, shape (N, 1).
        """
        embed = self.embed(review)  # 1. (N, T, C)
        rnn_all = self.rnn(embed)   # 2. (N, T, 2*hidden)
        # Extract the last valid output of each (possibly padded) sequence.
        # FIX: the GRU uses layout='NTC', so the sequence axis is 1;
        # SequenceLast defaults to axis=0 (TNC), which would reduce over
        # the batch axis instead.
        rnn_last = F.SequenceLast(
            rnn_all,
            sequence_length=F.cast(review_len, 'float32'),
            use_sequence_length=True,
            axis=1,
        )
        # FIX: the original loop read `net` before any assignment
        # (NameError); seed the dense stack with the GRU's last output.
        net = rnn_last
        for i in range(len(self.dense_sizes)):  # 3
            net = getattr(self, "dense_{}".format(i))(net)             # MLP
            net = getattr(self, "bn_dense_{}".format(i))(net)          # BN
            net = getattr(self, "activation_dense_{}".format(i))(net)  # relu
            net = getattr(self, "drop_dense_{}".format(i))(net)        # dropout
        net = self.output(net)  # 4
        return net
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment