Text Generation · Transformers · mpt · Composer · MosaicML · llm-foundry · custom_code
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor

class SharedEmbedding(nn.Embedding):
    """An nn.Embedding whose weight also serves as the output (unembedding) projection.

    Calling the module normally embeds token ids; passing ``unembed=True``
    projects hidden states back to vocabulary logits with the same weight matrix.
    """

    def forward(self, input: Tensor, unembed: bool = False) -> Tensor:
        if unembed:
            # Reuse the embedding weight as a linear map: (..., d_model) -> (..., vocab_size).
            return F.linear(input, self.weight)
        return super().forward(input)
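
Below is a minimal usage sketch, not part of the file itself, showing the shared weight used in both directions; the vocabulary and model sizes are arbitrary illustration values.

import torch

vocab_size, d_model = 128, 16                      # illustrative sizes only
emb = SharedEmbedding(vocab_size, d_model)

token_ids = torch.randint(0, vocab_size, (2, 8))   # (batch, seq_len)
hidden = emb(token_ids)                            # embed:   (2, 8, d_model)
logits = emb(hidden, unembed=True)                 # unembed: (2, 8, vocab_size)
print(hidden.shape, logits.shape)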