from transformers import PretrainedConfig


class LUARConfig(PretrainedConfig):
    # model_type keys this config into transformers' auto-class machinery.
    model_type = "LUAR"

    def __init__(
        self,
        embedding_size: int = 512,
        **kwargs,
    ):
        # Dimensionality of the author/document embeddings the model produces.
        self.embedding_size = embedding_size
        super().__init__(**kwargs)
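# A minimal usage sketch (the directory name "luar-config" is illustrative,
# not from the original): register the config so AutoConfig can resolve the
# "LUAR" model_type, then round-trip it through save_pretrained /
# from_pretrained to confirm the custom field is serialized.
from transformers import AutoConfig

AutoConfig.register("LUAR", LUARConfig)

config = LUARConfig(embedding_size=512)
config.save_pretrained("luar-config")          # writes config.json
reloaded = AutoConfig.from_pretrained("luar-config")
assert reloaded.embedding_size == 512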