Rachel Bawden committed on
Commit e19433f
1 Parent(s): 44acef3

added base version

Files changed (2)
  1. config.json +24 -88
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,102 +1,38 @@
  {
- "_name_or_path": "rbawden/modern_french_normalisation",
- "activation_dropout": 0.0,
- "activation_function": "relu",
  "architectures": [
- "FSMTForConditionalGeneration"
+ "AutoModelForSeq2SeqLM"
  ],
+ "model_type": "fsmt",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
  "attention_dropout": 0.0,
- "bos_token_id": 0,
  "d_model": 256,
- "decoder": {
- "_name_or_path": "",
- "add_cross_attention": false,
- "architectures": null,
- "bad_words_ids": null,
- "bos_token_id": 2,
- "chunk_size_feed_forward": 0,
- "cross_attention_hidden_size": null,
- "decoder_start_token_id": null,
- "diversity_penalty": 0.0,
- "do_sample": false,
- "early_stopping": false,
- "encoder_no_repeat_ngram_size": 0,
- "eos_token_id": null,
- "finetuning_task": null,
- "forced_bos_token_id": null,
- "forced_eos_token_id": null,
- "id2label": {
- "0": "LABEL_0",
- "1": "LABEL_1"
- },
- "is_decoder": false,
- "is_encoder_decoder": false,
- "label2id": {
- "LABEL_0": 0,
- "LABEL_1": 1
- },
- "length_penalty": 1.0,
- "max_length": 20,
- "min_length": 0,
- "model_type": "fsmt_decoder",
- "no_repeat_ngram_size": 0,
- "num_beam_groups": 1,
- "num_beams": 1,
- "num_return_sequences": 1,
- "output_attentions": false,
- "output_hidden_states": false,
- "output_scores": false,
- "pad_token_id": null,
- "prefix": null,
- "problem_type": null,
- "pruned_heads": {},
- "remove_invalid_values": false,
- "repetition_penalty": 1.0,
- "return_dict": true,
- "return_dict_in_generate": false,
- "sep_token_id": null,
- "task_specific_params": null,
- "temperature": 1.0,
- "tie_encoder_decoder": false,
- "tie_word_embeddings": true,
- "tokenizer_class": null,
- "top_k": 50,
- "top_p": 1.0,
- "torch_dtype": null,
- "torchscript": false,
- "transformers_version": "4.12.3",
- "use_bfloat16": false,
- "vocab_size": 1000
- },
- "decoder_attention_heads": 8,
- "decoder_ffn_dim": 1024,
- "decoder_layerdrop": 0,
- "decoder_layers": 4,
- "decoder_start_token_id": 2,
  "dropout": 0.3,
- "encoder_attention_heads": 4,
- "encoder_ffn_dim": 1024,
- "encoder_layerdrop": 0,
- "encoder_layers": 2,
- "eos_token_id": 2,
- "forced_eos_token_id": 2,
  "init_std": 0.02,
- "is_encoder_decoder": true,
+ "max_position_embeddings": 1024,
+ "num_hidden_layers": 2,
+ "src_vocab_size": 1000,
+ "tgt_vocab_size": 1000,
  "langs": [
  "src",
  "trg"
  ],
- "max_length": 200,
- "max_position_embeddings": 1024,
- "model_type": "fsmt",
- "num_beams": 5,
- "num_hidden_layers": 2,
+ "encoder_attention_heads": 4,
+ "encoder_ffn_dim": 1024,
+ "encoder_layerdrop": 0,
+ "encoder_layers": 2,
+ "decoder_attention_heads": 8,
+ "decoder_ffn_dim": 1024,
+ "decoder_layerdrop": 0,
+ "decoder_layers": 4,
+ "bos_token_id": 0,
  "pad_token_id": 1,
+ "eos_token_id": 2,
+ "unk_token_id": 3,
+ "is_encoder_decoder": true,
  "scale_embedding": true,
  "tie_word_embeddings": true,
- "torch_dtype": "float32",
- "transformers_version": null,
- "use_cache": true
- }
+ "num_beams": 5,
+ "early_stopping": false,
+ "length_penalty": 1.0
+ }
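
For reference only (not part of this commit): a minimal Python sketch of how the slimmed-down config above is consumed. The repo id is taken from the removed "_name_or_path" field, and loading through the generic transformers Auto classes is an assumption here, not something the commit specifies.

from transformers import AutoConfig, AutoModelForSeq2SeqLM

repo_id = "rbawden/modern_french_normalisation"  # from the old "_name_or_path"

# "model_type": "fsmt" makes AutoConfig return an FSMTConfig carrying the
# fields in the new config.json above
config = AutoConfig.from_pretrained(repo_id)
print(config.encoder_layers, config.decoder_layers)   # 2 4
print(config.src_vocab_size, config.tgt_vocab_size)   # 1000 1000

# AutoModelForSeq2SeqLM dispatches on model_type and instantiates the FSMT
# encoder-decoder (FSMTForConditionalGeneration) from these hyperparameters
model = AutoModelForSeq2SeqLM.from_pretrained(repo_id)
print(model.num_parameters())
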
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:73ac91f06e9c23157fcc1ace5f86ee46edf2f62464076379ea83f088fc1fb082
- size 25266477
+ oid sha256:2de8841768bff7b3091027d8ddbf6fb31b77c84bd691b7e44b6b76cc2a843936
+ size 24240367
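
Also for reference, a hypothetical check rather than part of the commit: the pytorch_model.bin entry is a Git LFS pointer, so the oid and size above describe the actual weight blob. A small sketch that fetches the file with huggingface_hub and verifies it against the new pointer:

import hashlib, os
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="rbawden/modern_french_normalisation",
                       filename="pytorch_model.bin")

# the pointer's "size" is the byte length of the real file ...
print(os.path.getsize(path) == 24240367)

# ... and "oid sha256:..." is the SHA-256 digest of its contents
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
print(digest == "2de8841768bff7b3091027d8ddbf6fb31b77c84bd691b7e44b6b76cc2a843936")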