raynardj committed
Commit ee7c761
1 Parent(s): 703a441
Files changed (2)
  1. config.json +5 -5
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -37,7 +37,7 @@
   },
   "layer_norm_epsilon": 1e-05,
   "length_penalty": 1.0,
-  "max_length": 256,
+  "max_length": 20,
   "min_length": 0,
   "model_type": "gpt2",
   "n_ctx": 1024,
@@ -48,7 +48,7 @@
   "n_positions": 1024,
   "no_repeat_ngram_size": 0,
   "num_beam_groups": 1,
-  "num_beams": 3,
+  "num_beams": 1,
   "num_return_sequences": 1,
   "output_attentions": false,
   "output_hidden_states": false,
@@ -72,11 +72,11 @@
   "summary_use_proj": true,
   "task_specific_params": {
     "text-generation": {
+      "bos_token_id": 101,
       "do_sample": false,
+      "eos_token_id": 102,
       "max_length": 256,
-      "num_beams": 5,
-      "bos_token_id": 101,
-      "eos_token_id": 102
+      "num_beams": 3
     }
   },
   "temperature": 1.0,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2e93235a86bcc21c4c24475dd45204265d52ee3ed2cd259997e47416cec9ed80
+oid sha256:1913a0c0fbe09162badb2254403f966c65c48ac7e3c052c85f8a93fecd9fcd6b
 size 960569915
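
The weights are stored through Git LFS, so only the pointer file changes here: the oid is the sha256 digest of the full 960569915-byte binary. A small sketch of how a locally pulled pytorch_model.bin could be checked against the new pointer (the local filename is an assumption):

# Minimal sketch: recompute the sha256 of a locally pulled pytorch_model.bin
# (path is an assumption) and compare it with the oid in the LFS pointer above.
import hashlib

expected = "1913a0c0fbe09162badb2254403f966c65c48ac7e3c052c85f8a93fecd9fcd6b"

digest = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print("match" if digest.hexdigest() == expected else "mismatch")
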