{
"_name_or_path": "/tmp/Helsinki-NLP/opus-mt-en-es",
"activation_dropout": 0.0,
"activation_function": "swish",
"add_bias_logits": false,
"add_final_layer_norm": false,
"architectures": [
"MarianMTModel"
],
"attention_dropout": 0.0,
"bad_words_ids": [
[
65000
]
],
"bos_token_id": 0,
"classif_dropout": 0.0,
"classifier_dropout": 0.0,
"d_model": 512,
"decoder_attention_heads": 8,
"decoder_ffn_dim": 2048,
"decoder_layerdrop": 0.0,
"decoder_layers": 6,
"decoder_start_token_id": 65000,
"decoder_vocab_size": 65001,
"dropout": 0.1,
"encoder_attention_heads": 8,
"encoder_ffn_dim": 2048,
"encoder_layerdrop": 0.0,
"encoder_layers": 6,
"eos_token_id": 0,
"extra_pos_embeddings": 65001,
"force_bos_token_to_be_generated": false,
"forced_eos_token_id": 0,
"gradient_checkpointing": false,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1",
"2": "LABEL_2"
},
"init_std": 0.02,
"is_encoder_decoder": true,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1,
"LABEL_2": 2
},
"max_length": 512,
"max_position_embeddings": 512,
"model_type": "marian",
"normalize_before": false,
"normalize_embedding": false,
"num_beams": 4,
"num_hidden_layers": 6,
"pad_token_id": 65000,
"scale_embedding": true,
"share_encoder_decoder_embeddings": true,
"static_position_embeddings": true,
"transformers_version": "4.22.0.dev0",
"use_cache": true,
"vocab_size": 65001
}