Forked from isaacmg/attend_preliminary.py by @geriskenderi, created February 16, 2021.
import math

import torch
import torch.nn as nn

class PositionalEncoding(nn.Module):
    # Standard sinusoidal positional encoding (Vaswani et al., 2017);
    # assumed implementation for the original "TODO implement" stub.
    def __init__(self, d_model, max_len=5000):
        super().__init__()
        position = torch.arange(max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe = torch.zeros(max_len, 1, d_model)
        pe[:, 0, 0::2] = torch.sin(position * div_term)
        pe[:, 0, 1::2] = torch.cos(position * div_term)
        self.register_buffer("pe", pe)

    def forward(self, x):
        # x: (seq_len, batch, d_model)
        return x + self.pe[:x.size(0)]

class AttendDiagnose(nn.Module):
    def __init__(self, number_measurements, filter_number):
        super().__init__()
        self.d_model = filter_number * number_measurements
        # 1x1 convolution embeds the raw measurements into d_model channels.
        self.embedding_conv = nn.Conv1d(number_measurements, self.d_model, 1)
        self.pe = PositionalEncoding(self.d_model)
        # embed_dim (d_model) must be divisible by num_heads (8).
        self.masked_attn = nn.MultiheadAttention(self.d_model, 8)
        self.norm = nn.LayerNorm(self.d_model)
        self.final_layer = nn.Linear(self.d_model, 1)

    def forward(self, X):
        # X: (batch, number_measurements, seq_len)
        x = self.embedding_conv(X)        # -> (batch, d_model, seq_len)
        # nn.MultiheadAttention expects (seq_len, batch, d_model) by default, so permute rather than transpose(1, 2).
        x = x.permute(2, 0, 1)
        x = self.pe(x)
        x = self.masked_attn(x, x, x)[0]  # self-attention (no mask is applied despite the name)
        x = self.norm(x)
        return self.final_layer(x)        # -> (seq_len, batch, 1)
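
A minimal usage sketch (not part of the original gist); the sizes below are illustrative assumptions (12 measurements, 64 filters, so d_model = 768, which is divisible by the 8 attention heads):

if __name__ == "__main__":
    # Illustrative dimensions, chosen only for this sketch.
    model = AttendDiagnose(number_measurements=12, filter_number=64)
    dummy = torch.randn(4, 12, 100)  # (batch, measurements, seq_len)
    out = model(dummy)
    print(out.shape)  # torch.Size([100, 4, 1])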