from transformers import PretrainedConfig


class LUARConfig(PretrainedConfig):
    """Configuration class for the LUAR (Learning Universal Authorship
    Representations) model."""

    model_type = "LUAR"

    def __init__(
        self,
        embedding_size: int = 512,
        **kwargs,
    ):
        # Dimensionality of the author embeddings produced by the model.
        self.embedding_size = embedding_size
        super().__init__(**kwargs)
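

# Minimal usage sketch (illustrative, not part of the original file): builds a
# config and round-trips it through the standard PretrainedConfig
# serialization API. The "luar-config" directory name is a placeholder.
if __name__ == "__main__":
    config = LUARConfig(embedding_size=512)
    config.save_pretrained("luar-config")  # writes config.json to the directory
    reloaded = LUARConfig.from_pretrained("luar-config")
    assert reloaded.embedding_size == config.embedding_size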