yzhuang committed on
Commit f6333d9
Parent: 9df27cf

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,851 @@
{
  "_name_or_path": "yzhuang/llava-falcon-mamba-7B-linear-anyres",
  "architectures": ["LlavaFalconMambaForCausalLM"],
  "bos_token_id": 8,
  "conv_kernel": 4,
  "eos_token_id": 11,
  "expand": 16,
  "freeze_mm_mlp_adapter": false,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "image_aspect_ratio": "anyres",
  "image_grid_pinpoints": [
    [336, 336], [336, 672], [336, 1008], [336, 1344], [336, 1680], [336, 2016],
    [336, 2352], [336, 2688], [336, 3024], [336, 3360], [336, 3696], [336, 4032],
    [336, 4368], [336, 4704], [336, 5040], [336, 5376], [336, 5712], [336, 6048],
    [336, 6384], [336, 6720], [336, 7056], [336, 7392], [336, 7728], [336, 8064],
    [336, 8400], [336, 8736], [336, 9072], [336, 9408], [336, 9744], [336, 10080],
    [336, 10416], [336, 10752], [336, 11088], [336, 11424], [336, 11760], [336, 12096],
    [336, 12432], [336, 12768], [336, 13104], [336, 13440], [336, 13776], [336, 14112],
    [336, 14448], [336, 14784], [336, 15120], [336, 15456], [336, 15792], [336, 16128],
    [672, 336], [672, 672], [672, 1008], [672, 1344], [672, 1680], [672, 2016],
    [672, 2352], [672, 2688], [672, 3024], [672, 3360], [672, 3696], [672, 4032],
    [672, 4368], [672, 4704], [672, 5040], [672, 5376], [672, 5712], [672, 6048],
    [672, 6384], [672, 6720], [672, 7056], [672, 7392], [672, 7728], [672, 8064],
    [1008, 336], [1008, 672], [1008, 1008], [1008, 1344], [1008, 1680], [1008, 2016],
    [1008, 2352], [1008, 2688], [1008, 3024], [1008, 3360], [1008, 3696], [1008, 4032],
    [1008, 4368], [1008, 4704], [1008, 5040], [1008, 5376],
    [1344, 336], [1344, 672], [1344, 1008], [1344, 1344], [1344, 1680], [1344, 2016],
    [1344, 2352], [1344, 2688], [1344, 3024], [1344, 3360], [1344, 3696], [1344, 4032],
    [1680, 336], [1680, 672], [1680, 1008], [1680, 1344], [1680, 1680], [1680, 2016],
    [1680, 2352], [1680, 2688], [1680, 3024],
    [2016, 336], [2016, 672], [2016, 1008], [2016, 1344], [2016, 1680], [2016, 2016],
    [2016, 2352], [2016, 2688],
    [2352, 336], [2352, 672], [2352, 1008], [2352, 1344], [2352, 1680], [2352, 2016],
    [2352, 2352],
    [2688, 336], [2688, 672], [2688, 1008], [2688, 1344], [2688, 1680], [2688, 2016],
    [3024, 336], [3024, 672], [3024, 1008], [3024, 1344], [3024, 1680],
    [3360, 336], [3360, 672], [3360, 1008], [3360, 1344],
    [3696, 336], [3696, 672], [3696, 1008], [3696, 1344],
    [4032, 336], [4032, 672], [4032, 1008], [4032, 1344],
    [4368, 336], [4368, 672], [4368, 1008],
    [4704, 336], [4704, 672], [4704, 1008],
    [5040, 336], [5040, 672], [5040, 1008],
    [5376, 336], [5376, 672], [5376, 1008],
    [5712, 336], [5712, 672],
    [6048, 336], [6048, 672],
    [6384, 336], [6384, 672],
    [6720, 336], [6720, 672],
    [7056, 336], [7056, 672],
    [7392, 336], [7392, 672],
    [7728, 336], [7728, 672],
    [8064, 336], [8064, 672],
    [8400, 336], [8736, 336], [9072, 336], [9408, 336], [9744, 336], [10080, 336],
    [10416, 336], [10752, 336], [11088, 336], [11424, 336], [11760, 336], [12096, 336],
    [12432, 336], [12768, 336], [13104, 336], [13440, 336], [13776, 336], [14112, 336],
    [14448, 336], [14784, 336], [15120, 336], [15456, 336], [15792, 336], [16128, 336]
  ],
  "initializer_range": 0.1,
  "intermediate_size": 8192,
  "layer_norm_epsilon": 1e-05,
  "max_position_embeddings": 1000000000000000019884624838656,
  "mm_hidden_size": 1024,
  "mm_patch_merge_type": "spatial_unpad",
  "mm_projector_lr": 2e-05,
  "mm_projector_type": "linear",
  "mm_use_im_patch_token": false,
  "mm_use_im_start_end": false,
  "mm_vision_select_feature": "patch",
  "mm_vision_select_layer": -2,
  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
  "mm_vision_tower_lr": 2e-06,
  "model_type": "falcon_mamba",
  "num_hidden_layers": 64,
  "pad_token_id": 0,
  "rescale_prenorm_residual": false,
  "residual_in_fp32": true,
  "state_size": 16,
  "tie_word_embeddings": false,
  "time_step_floor": 0.0001,
  "time_step_init_scheme": "random",
  "time_step_max": 0.1,
  "time_step_min": 0.001,
  "time_step_rank": 256,
  "time_step_scale": 1.0,
  "tokenizer_model_max_length": 32896,
  "tokenizer_padding_side": "right",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.45.0.dev0",
  "tune_mm_mlp_adapter": false,
  "unfreeze_mm_vision_tower": true,
  "use_bias": false,
  "use_cache": false,
  "use_conv_bias": true,
  "use_mambapy": false,
  "use_mm_proj": true,
  "vocab_size": 65024
}
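Note on the anyres fields above: "image_grid_pinpoints" enumerates every grid of 336 px CLIP tiles the image can be resized into, and the oversized "max_position_embeddings" appears to be float 1e30 serialized as an integer, a common "no positional limit" sentinel in Mamba-family configs. As a hedged illustration of how a pinpoint is usually chosen, the sketch below follows LLaVA-NeXT's published select_best_resolution logic; this repo's exact preprocessing code may differ.

def select_best_resolution(original_size, possible_resolutions):
    """Pick the pinpoint that keeps the most image content at the least padding cost."""
    original_width, original_height = original_size
    best_fit = None
    max_effective = 0
    min_wasted = float("inf")
    for width, height in possible_resolutions:
        # Scale the image to fit inside this candidate grid.
        scale = min(width / original_width, height / original_height)
        down_w, down_h = int(original_width * scale), int(original_height * scale)
        # Effective pixels kept, capped at the original resolution.
        effective = min(down_w * down_h, original_width * original_height)
        wasted = width * height - effective
        if effective > max_effective or (effective == max_effective and wasted < min_wasted):
            max_effective, min_wasted = effective, wasted
            best_fit = (width, height)
    return best_fit

# Reading pinpoints as (width, height): an 800x600 input maps to (1008, 672),
# the smallest grid that contains the image at full resolution.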
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:699abdfae0716a4f58d1b693912df34914aefe8120d1e52b71faecba105e2c72
size 4956192312
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:60087e6b5d99f2707429bd91d67633c2cff2047faf14622fcb1d5c6937b13e1c
size 4987536920
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2c0e23d0245159930b2f2f7fdd82cbdbc81c14786493205572f6166dd9648991
size 4684478664
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6c87bef7cdc7aedea8ff9473df5937c51f9067e73c11e97878adff89beeb3b6f
size 532676736
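The four shard entries above are Git LFS pointer files (spec v1): each records only the SHA-256 digest and byte size of the real weight file. A minimal sketch for checking a downloaded shard against its pointer, using only the standard library (shard filename taken from the headers above):

import hashlib
import os

def verify_lfs_pointer(path: str, expected_oid: str, expected_size: int) -> None:
    """Compare a downloaded file against the oid/size recorded in its LFS pointer."""
    actual_size = os.path.getsize(path)
    if actual_size != expected_size:
        raise ValueError(f"size mismatch: {actual_size} != {expected_size}")
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so a ~5 GB shard never sits in memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    if digest.hexdigest() != expected_oid:
        raise ValueError("sha256 mismatch")

verify_lfs_pointer(
    "model-00001-of-00004.safetensors",
    "699abdfae0716a4f58d1b693912df34914aefe8120d1e52b71faecba105e2c72",
    4956192312,
)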
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1,36 @@
{
  "additional_special_tokens": [
    ">>TITLE<<", ">>ABSTRACT<<", ">>INTRODUCTION<<", ">>SUMMARY<<", ">>COMMENT<<",
    ">>ANSWER<<", ">>QUESTION<<", "assistant", "<|begin_of_text|>", "<|im_start|>", "<|im_end|>"
  ],
  "bos_token": {
    "content": "<|begin_of_text|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|end_of_text|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": ">>TITLE<<",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,129 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": { "content": ">>TITLE<<", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "1": { "content": ">>ABSTRACT<<", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "2": { "content": ">>INTRODUCTION<<", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "3": { "content": ">>SUMMARY<<", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "4": { "content": ">>COMMENT<<", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "5": { "content": ">>ANSWER<<", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "6": { "content": ">>QUESTION<<", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "7": { "content": "assistant", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "8": { "content": "<|begin_of_text|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "9": { "content": "<|im_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "10": { "content": "<|im_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
    "11": { "content": "<|end_of_text|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }
  },
  "additional_special_tokens": [
    ">>TITLE<<", ">>ABSTRACT<<", ">>INTRODUCTION<<", ">>SUMMARY<<", ">>COMMENT<<",
    ">>ANSWER<<", ">>QUESTION<<", "assistant", "<|begin_of_text|>", "<|im_start|>", "<|im_end|>"
  ],
  "bos_token": "<|begin_of_text|>",
  "chat_template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|end_of_text|>",
  "max_length": null,
  "model_input_names": ["input_ids", "attention_mask"],
  "model_max_length": 32896,
  "pad_to_multiple_of": null,
  "pad_token": ">>TITLE<<",
  "pad_token_type_id": 0,
  "padding_side": "right",
  "tokenizer_class": "PreTrainedTokenizerFast"
}
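The "chat_template" above is a ChatML-style Jinja template. A small sketch of rendering it through transformers (repo id taken from "_name_or_path" in config.json; the model's own pipeline may additionally insert image tokens):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("yzhuang/llava-falcon-mamba-7B-linear-anyres")
messages = [{"role": "user", "content": "Describe the image."}]

# tokenize=False returns the rendered prompt string; add_generation_prompt=True
# appends the '<|im_start|>assistant\n' header from the template.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|begin_of_text|><|im_start|>user
# Describe the image.<|im_end|>
# <|im_start|>assistant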