# moral_immigration_it/configuration_moral.py
from transformers import PretrainedConfig


class BertItalianoConfig(PretrainedConfig):
    """Configuration for the BertItaliano model.

    Stores BERT-style hyperparameters; the defaults below match a
    BERT-base architecture with a 32102-token vocabulary.
    """

    model_type = "BertItaliano"
    def __init__(
        self,
        attention_probs_dropout_prob: float = 0.1,
        gradient_checkpointing: bool = False,
        hidden_act: str = "gelu",
        hidden_dropout_prob: float = 0.1,
        hidden_size: int = 768,
        initializer_range: float = 0.02,
        intermediate_size: int = 3072,
        layer_norm_eps: float = 1e-12,
        max_position_embeddings: int = 512,
        num_attention_heads: int = 12,
        num_hidden_layers: int = 12,
        pad_token_id: int = 0,
        position_embedding_type: str = "absolute",
        transformers_version: str = "4.10.3",
        torch_dtype: str = "float32",
        type_vocab_size: int = 2,
        use_cache: bool = True,
        vocab_size: int = 32102,
        **kwargs,
    ):
        # Call the parent constructor first and route pad_token_id through it:
        # PretrainedConfig.__init__ pops pad_token_id, torch_dtype and
        # transformers_version out of kwargs, so attributes assigned before
        # this call would be silently reset to None.
        super().__init__(pad_token_id=pad_token_id, **kwargs)
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.gradient_checkpointing = gradient_checkpointing
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.hidden_size = hidden_size
        self.initializer_range = initializer_range
        self.intermediate_size = intermediate_size
        self.layer_norm_eps = layer_norm_eps
        self.max_position_embeddings = max_position_embeddings
        self.num_attention_heads = num_attention_heads
        self.num_hidden_layers = num_hidden_layers
        self.position_embedding_type = position_embedding_type
        self.transformers_version = transformers_version
        self.torch_dtype = torch_dtype
        self.type_vocab_size = type_vocab_size
        self.use_cache = use_cache
        self.vocab_size = vocab_size
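
# Usage sketch (illustrative, not part of the original file): round-trips the
# config through save_pretrained/from_pretrained, the standard PretrainedConfig
# serialization API. The directory name "moral_immigration_it" is an assumption
# chosen to match the repo name.
if __name__ == "__main__":
    config = BertItalianoConfig()
    print(config.model_type, config.hidden_size, config.vocab_size)

    # Writes config.json (including model_type) into the target directory.
    config.save_pretrained("moral_immigration_it")

    # Reloading restores the same hyperparameters from config.json.
    reloaded = BertItalianoConfig.from_pretrained("moral_immigration_it")
    assert reloaded.hidden_size == config.hidden_size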