# LUAR-CRUD / config.py
from transformers import PretrainedConfig


class LUARConfig(PretrainedConfig):
    """Configuration class for the LUAR model."""

    model_type = "LUAR"

    def __init__(
        self,
        embedding_size: int = 512,  # dimensionality of the output embedding
        **kwargs,
    ):
        self.embedding_size = embedding_size
        super().__init__(**kwargs)
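

# Minimal usage sketch (an assumption for illustration, not part of the uploaded
# file): it shows how a custom PretrainedConfig subclass such as LUARConfig is
# typically instantiated, saved, and reloaded through the standard transformers
# AutoConfig machinery. The "./luar-config" directory name is hypothetical.
if __name__ == "__main__":
    from transformers import AutoConfig

    config = LUARConfig(embedding_size=512)
    config.save_pretrained("./luar-config")  # writes config.json to disk

    # Register the custom model_type so AutoConfig can resolve it by name.
    AutoConfig.register("LUAR", LUARConfig)
    reloaded = AutoConfig.from_pretrained("./luar-config")
    print(reloaded.embedding_size)  # 512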