File size: 296 Bytes
3f96a16 881b143 3f96a16 881b143 3f96a16 881b143 |
1 2 3 4 5 6 7 8 9 10 11 12 |
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
class SharedEmbedding(nn.Embedding):
    """An embedding table whose weight matrix is shared with the output projection.

    In embed mode (the default) this behaves exactly like ``nn.Embedding``:
    integer token ids are mapped to their embedding vectors. With
    ``unembed=True`` the same weight matrix is reused as a linear projection
    (no bias), mapping hidden states back to vocabulary-sized logits —
    the classic weight-tying scheme for language models.
    """

    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
        """Embed token ids, or project hidden states onto the vocabulary.

        Args:
            input: Integer token ids when ``unembed`` is False; float hidden
                states of size ``(..., embedding_dim)`` when ``unembed`` is True.
            unembed: If True, compute ``input @ weight.T`` instead of a lookup.

        Returns:
            Embedding vectors, or vocabulary logits when ``unembed`` is True.
        """
        if not unembed:
            # Plain embedding lookup — defer to nn.Embedding.
            return super().forward(input)
        # Tied unembedding: project with the shared weight, no bias term.
        return F.linear(input, self.weight)
|