@isaacmg
Created November 1, 2019 05:20
import math
import torch
import torch.nn as nn


class AttendDiagnose(nn.Module):
    def __init__(self, number_measurements, filter_number):
        super().__init__()
        # A 1x1 conv embeds each time step's measurements into d_model channels
        self.d_model = filter_number * number_measurements
        self.embedding_conv = nn.Conv1d(number_measurements, self.d_model, 1)
        self.pe = PositionalEncoding(self.d_model)
        # embed_dim=d_model with 8 attention heads (d_model must be divisible by 8)
        self.masked_attn = nn.MultiheadAttention(self.d_model, 8)
        self.norm = nn.LayerNorm(self.d_model)
        self.final_layer = nn.Linear(self.d_model, 1)

    def forward(self, X):
        # X: (batch, number_measurements, seq_len)
        x = self.embedding_conv(X)  # (batch, d_model, seq_len)
        x = x.permute(2, 0, 1)  # (seq_len, batch, d_model), the layout attention expects
        x = self.pe(x)
        # Self-attention: query, key, and value are all x; no attention mask is applied here
        x = self.masked_attn(x, x, x)[0]
        x = self.norm(x)
        return self.final_layer(x)  # (seq_len, batch, 1)
class PositionalEncoding(nn.Module):
    # Standard sinusoidal positional encoding ("Attention Is All You Need")
    def __init__(self, d_model, max_len=5000):
        super().__init__()
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        self.register_buffer('pe', pe.unsqueeze(1))  # (max_len, 1, d_model)

    def forward(self, x):
        # x: (seq_len, batch, d_model); add the encoding for each position
        return x + self.pe[:x.size(0)]
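

# A quick smoke test of the shapes; the sizes below are made up for illustration.
# With 12 measurements and filter_number=8, d_model is 96, which is divisible by
# the 8 attention heads.
model = AttendDiagnose(number_measurements=12, filter_number=8)
X = torch.randn(4, 12, 100)  # (batch=4, measurements=12, seq_len=100)
out = model(X)
print(out.shape)  # torch.Size([100, 4, 1]), i.e. (seq_len, batch, 1)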