HachiML committed
Commit 6c726cf
1 parent: d5beaa9

Upload Qwen2ForCausalLM

Files changed (1):
config.json +5 -0
config.json CHANGED
@@ -20,6 +20,11 @@
   "rope_scaling": null,
   "rope_theta": 1000000.0,
   "sliding_window": null,
+  "step_separator_ids": [
+    3407,
+    2533,
+    271
+  ],
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.46.3",