import torch
import torch.nn as nn
import math

# Sinusoidal Positional Encoding layer ("Attention Is All You Need")
class PositionalEncoding(nn.Module):
    def __init__(self, embedding_dim, max_len=5000):
        super(PositionalEncoding, self).__init__()
        # Position indices (max_len, 1) and per-dimension frequency terms
        position = torch.arange(0, max_len).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, embedding_dim, 2) * -(math.log(10000.0) / embedding_dim))
        # Sine on even dimensions, cosine on odd; stored as a non-trainable buffer
        pe = torch.zeros(max_len, embedding_dim)
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        self.register_buffer('pe', pe.unsqueeze(0))

    def forward(self, x):
        # x: (batch, seq_len, embedding_dim); add the matching encoding slice
        return x + self.pe[:, :x.size(1)]
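
# A minimal usage sketch; the shapes below (batch=2, seq_len=10,
# embedding_dim=512) are illustrative assumptions, not from the original gist.
pos_enc = PositionalEncoding(embedding_dim=512)
tokens = torch.randn(2, 10, 512)   # dummy (batch, seq_len, embedding_dim) embeddings
encoded = pos_enc(tokens)          # same shape, with positional signal added
print(encoded.shape)               # torch.Size([2, 10, 512])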