Skip to content

Instantly share code, notes, and snippets.

# link to package https://github.com/lucidrains/slot-attention
import torch
from torch import nn
class Residual(nn.Module):
def __init__(self, fn):
    """Wrap *fn* so the module can later apply it as a residual branch.

    Args:
        fn: a callable (typically an ``nn.Module``) stored for use in
            ``forward``; presumably applied as ``fn(x) + x`` — the
            ``forward`` body is not visible here, confirm in full source.
    """
    super().__init__()  # required nn.Module setup before assigning submodules
    self.fn = fn
def forward(self, x):