File size: 885 Bytes
5af7e8d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
from torch import nn
from transformers import BertForMaskedLM, PreTrainedModel

from src.config import PunctuationBertConfig


class BertForPunctuation(PreTrainedModel):
    """BERT-based punctuation classifier over a fixed token window.

    Runs a masked-LM BERT over the input window, flattens the per-position
    vocabulary logits into one feature vector, and maps it through
    batch-norm, dropout and a linear head to ``config.output_size``
    punctuation classes.
    """

    config_class = PunctuationBertConfig

    def __init__(self, config):
        super().__init__(config)
        # Window length: backward context + forward context,
        # plus the predicted token and the pause token.
        window_len = config.backward_context + config.forward_context + 2
        # Flattened feature size fed to the normalization/linear head.
        flat_features = window_len * config.vocab_size
        self.bert = BertForMaskedLM(config)
        self.bn = nn.BatchNorm1d(flat_features)
        self.fc = nn.Linear(flat_features, config.output_size)
        self.dropout = nn.Dropout(config.dropout)

    def forward(self, x):
        # First element of the MLM output is the logits tensor of shape
        # (batch, window_len, vocab_size) — matches the bn/fc sizing above.
        logits = self.bert(x)[0]
        # Collapse all per-position logits into one vector per example.
        flat = logits.flatten(start_dim=1)
        return self.fc(self.dropout(self.bn(flat)))