nnngoc committed
Commit
3ede488
1 Parent(s): 4beecae

Upload 7 files

[email protected] ADDED
@@ -0,0 +1,65 @@
+ epoch,steps,MRR@10,NDCG@10
+ 0,1000,0.29738690476190477,0.34032839206744936
+ 0,2000,0.30844841269841267,0.35233595389381434
+ 0,3000,0.3399444444444445,0.3844602514982593
+ 0,4000,0.3730456349206349,0.41700926384956694
+ 0,5000,0.37280952380952387,0.4202655098721365
+ 0,6000,0.38894047619047617,0.4317672265065826
+ 0,7000,0.39206746031746037,0.4453424697751804
+ 0,8000,0.3848095238095239,0.442181929240689
+ 0,9000,0.38661904761904764,0.4424993156912144
+ 0,10000,0.38632341269841264,0.4437529251009649
+ 0,11000,0.4095793650793651,0.46257753001658386
+ 0,12000,0.41387301587301584,0.4699479524157117
+ 0,13000,0.4156547619047619,0.4722938458851684
+ 0,14000,0.4093253968253968,0.4635029087386801
+ 0,15000,0.38688492063492064,0.4449751756447218
+ 0,16000,0.41827777777777775,0.4735224043513835
+ 0,17000,0.41301984126984126,0.46633864029878763
+ 0,18000,0.4034781746031746,0.4588431184767029
+ 0,19000,0.387452380952381,0.4484195477868819
+ 0,20000,0.41254960317460315,0.4728754550475716
+ 0,21000,0.4238452380952381,0.47778049725646676
+ 0,22000,0.4428015873015873,0.4939497151324227
+ 0,23000,0.42132341269841267,0.47916220564764644
+ 0,24000,0.4261865079365079,0.48739896302436436
+ 0,25000,0.41726587301587303,0.476767736116674
+ 0,26000,0.42959325396825393,0.4880944906777834
+ 0,27000,0.42758730158730157,0.4875116836811202
+ 0,28000,0.44155555555555553,0.49416604804543485
+ 0,29000,0.42599801587301583,0.4816256421549538
+ 0,30000,0.42875595238095243,0.4885635595477285
+ 0,31000,0.44228968253968254,0.4993420740896253
+ 0,-1,0.42438293650793646,0.4864878808795836
+ 1,1000,0.4255555555555556,0.4833138367283381
+ 1,2000,0.4268809523809524,0.4872616322542224
+ 1,3000,0.4379365079365079,0.49341528572166055
+ 1,4000,0.44076388888888884,0.49660232832392454
+ 1,5000,0.4308055555555555,0.48800076675298937
+ 1,6000,0.4364345238095238,0.4958032235497148
+ 1,7000,0.43543849206349206,0.49002080157559424
+ 1,8000,0.4422123015873016,0.4949142911222971
+ 1,9000,0.43767857142857136,0.49518950345096996
+ 1,10000,0.42800198412698415,0.4912192142862679
+ 1,11000,0.43849801587301585,0.49609200056529146
+ 1,12000,0.44205952380952385,0.4994254978635945
+ 1,13000,0.4272519841269842,0.4868850084003704
+ 1,14000,0.43184920634920637,0.4893173388361987
+ 1,15000,0.4282261904761905,0.4868847293264529
+ 1,16000,0.4345039682539683,0.4892160859212514
+ 1,17000,0.44295238095238093,0.49771369741372673
+ 1,18000,0.43580357142857146,0.4954543605854125
+ 1,19000,0.4403829365079365,0.49743905013170137
+ 1,20000,0.43377380952380945,0.4914040907871128
+ 1,21000,0.43663492063492065,0.4978456660378262
+ 1,22000,0.4374940476190476,0.4978425922525963
+ 1,23000,0.43878571428571433,0.49713127937954554
+ 1,24000,0.44307936507936513,0.49951784790296033
+ 1,25000,0.4409801587301587,0.4986984153979162
+ 1,26000,0.44124603174603183,0.4993188751666098
+ 1,27000,0.4413015873015873,0.49734061418137093
+ 1,28000,0.4385575396825397,0.4985912885708459
+ 1,29000,0.44225396825396823,0.5014830795837447
+ 1,30000,0.4432380952380953,0.5000511711675008
+ 1,31000,0.4417460317460318,0.500042507336213
+ 1,-1,0.4414960317460317,0.49975326338587034
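
This CSV logs one reranking evaluation per row during fine-tuning, in the layout written by sentence-transformers' CERerankingEvaluator; a steps value of -1 appears to mark the end-of-epoch evaluation. A minimal sketch for locating the best checkpoint by MRR@10 (the local file name is an assumption, since the uploaded name is obscured above):

import csv

# Load the evaluator output; the path is an assumption for illustration.
with open("CERerankingEvaluator_results.csv", newline="") as f:
    rows = list(csv.DictReader(f))

# Pick the evaluation with the highest MRR@10 across both epochs.
best = max(rows, key=lambda r: float(r["MRR@10"]))
print(f"best: epoch {best['epoch']}, steps {best['steps']}, "
      f"MRR@10 {float(best['MRR@10']):.4f}, NDCG@10 {float(best['NDCG@10']):.4f}")

On the numbers above this selects epoch 1, steps 24000 (MRR@10 about 0.443).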
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "cross-encoder/ms-marco-MiniLM-L-6-v2",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 384,
+   "id2label": {
+     "0": "LABEL_0"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 1536,
+   "label2id": {
+     "LABEL_0": 0
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 6,
+   "pad_token_id": 0,
+   "position_embedding_type": "absolute",
+   "sbert_ce_default_activation_function": "torch.nn.modules.linear.Identity",
+   "torch_dtype": "float32",
+   "transformers_version": "4.39.3",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 30522
+ }
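
The config identifies a 6-layer, 384-dim BERT fine-tuned from cross-encoder/ms-marco-MiniLM-L-6-v2, with a single output label and an Identity activation function, i.e. the model emits one raw relevance logit per query-passage pair. A minimal usage sketch with sentence-transformers' CrossEncoder; the checkpoint path and example pairs are assumptions:

from sentence_transformers import CrossEncoder

# Hypothetical local path to this uploaded checkpoint.
model = CrossEncoder("path/to/this/checkpoint", max_length=512)

# One raw score per (query, passage) pair; higher means more relevant.
scores = model.predict([
    ("what is the capital of france", "Paris is the capital of France."),
    ("what is the capital of france", "The Nile is a river in Africa."),
])
print(scores)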
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1530c2a0e9067031d21c9cf681d1f11a4d9a3afcda426a8b3a928a82026647d
+ size 90866412
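
model.safetensors is tracked with Git LFS, so the repository stores only this pointer; the roughly 90.9 MB weights file is fetched separately on checkout or download. A sketch that verifies a downloaded copy against the pointer's sha256 oid, assuming the file sits in the working directory:

import hashlib

# Stream the downloaded weights and hash them chunk by chunk.
h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

# Must match the oid recorded in the LFS pointer above.
assert h.hexdigest() == "b1530c2a0e9067031d21c9cf681d1f11a4d9a3afcda426a8b3a928a82026647d"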
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,57 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "100": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "101": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "102": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "103": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": true,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
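
tokenizer_config.json declares a standard uncased BertTokenizer (lowercasing enabled, 512-token model_max_length) with the five special tokens at the usual BERT vocabulary ids. A sketch of how a query-passage pair is encoded for this cross-encoder; the checkpoint path and texts are assumptions:

from transformers import AutoTokenizer

# Hypothetical local path containing tokenizer.json, vocab.txt, etc.
tokenizer = AutoTokenizer.from_pretrained("path/to/this/checkpoint")

# Cross-encoders consume query and passage as one sequence pair:
# [CLS] query [SEP] passage [SEP], distinguished by token_type_ids.
enc = tokenizer("what is the capital of france",
                "Paris is the capital of France.",
                truncation=True, max_length=512)
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))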
vocab.txt ADDED
The diff for this file is too large to render. See raw diff