{
  "dataset_revision": "d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46",
  "mteb_dataset_name": "SprintDuplicateQuestions",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.9978613861386139,
      "accuracy_threshold": 0.7232062816619873,
      "ap": 0.9452324861684893,
      "f1": 0.8893421723610403,
      "f1_threshold": 0.7091838121414185,
      "precision": 0.9073881373569199,
      "recall": 0.872
    },
    "dot": {
      "accuracy": 0.9949306930693069,
      "accuracy_threshold": 22.577022552490234,
      "ap": 0.8059011556474294,
      "f1": 0.7370959961408587,
      "f1_threshold": 20.937259674072266,
      "precision": 0.712022367194781,
      "recall": 0.764
    },
    "euclidean": {
      "accuracy": 0.9974257425742574,
      "accuracy_threshold": 4.111460208892822,
      "ap": 0.9195496585306108,
      "f1": 0.8673469387755102,
      "f1_threshold": 4.147090435028076,
      "precision": 0.8854166666666666,
      "recall": 0.85
    },
    "evaluation_time": 11.13,
    "manhattan": {
      "accuracy": 0.9974455445544554,
      "accuracy_threshold": 63.65602111816406,
      "ap": 0.9200343282163483,
      "f1": 0.8664987405541562,
      "f1_threshold": 65.03108978271484,
      "precision": 0.8730964467005076,
      "recall": 0.86
    },
    "max": {
      "accuracy": 0.9978613861386139,
      "ap": 0.9452324861684893,
      "f1": 0.8893421723610403
    }
  }
}