{
    "config_general": {
        "model_name": "freewheelin/free-evo-qwen72b-v0.8-re",
        "model_dtype": "float16",
        "model_size": 0
    },
    "results": {
        "harness-c_arc_challenge": {
            "acc_norm": 71.16,
            "acc_stderr": 0,
            "c_arc_challenge_25shot_acc": 67.06,
            "c_arc_challenge_25shot_acc_norm": 71.16
        },
        "harness-c_gsm8k": {
            "acc": 70.28,
            "acc_stderr": 0,
            "c_gsm8k_5shot_acc": 70.28
        },
        "harness-c_hellaswag": {
            "acc_norm": 78.09,
            "acc_stderr": 0,
            "c_hellaswag_10shot_acc": 60.65,
            "c_hellaswag_10shot_acc_norm": 78.09
        },
        "harness-c-sem-v2": {
            "acc": 91.20749999999998,
            "acc_stderr": 0,
            "c_sem_v2-LLSRC_5shot_acc": 93.96,
            "c_sem_v2-SLPWC_5shot_acc": 88.57,
            "c_sem_v2-SLRFC_5shot_acc": 96.83,
            "c_sem_v2-SLSRC_5shot_acc": 85.47,
            "c_sem_v2-LLSRC_5shot_acc_norm": 93.96,
            "c_sem_v2-SLPWC_5shot_acc_norm": 88.57,
            "c_sem_v2-SLRFC_5shot_acc_norm": 96.83,
            "c_sem_v2-SLSRC_5shot_acc_norm": 85.47
        },
        "harness-c_truthfulqa_mc": {
            "mc2": 66.27,
            "acc_stderr": 0,
            "c_truthfulqa_mc_0shot_mc1": 48.59,
            "c_truthfulqa_mc_0shot_mc2": 66.27
        },
        "harness-c_winogrande": {
            "acc": 71.43,
            "acc_stderr": 0,
            "c_winogrande_0shot_acc": 71.43
        },
        "harness-cmmlu": {
            "acc_norm": 73.88,
            "acc_stderr": 0,
            "cmmlu_fullavg_5shot_acc": 73.88,
            "cmmlu-virology_5shot_acc": 52.41,
            "cmmlu-marketing_5shot_acc": 89.74,
            "cmmlu-nutrition_5shot_acc": 83.66,
            "cmmlu-sociology_5shot_acc": 88.06,
            "cmmlu-management_5shot_acc": 77.67,
            "cmmlu-philosophy_5shot_acc": 75.24,
            "cmmlu-prehistory_5shot_acc": 77.16,
            "cmmlu-miscellaneous_5shot_acc": 81.86,
            "cmmlu-moral_disputes_5shot_acc": 76.01,
            "cmmlu-moral_scenarios_5shot_acc": 64.58,
            "cmmlu-world_religions_5shot_acc": 77.78,
            "cmmlu-machine_learning_5shot_acc": 63.39,
            "cmmlu-medical_genetics_5shot_acc": 73.0,
            "cmmlu-professional_law_5shot_acc": 55.61,
            "cmmlu-public_relations_5shot_acc": 67.27,
            "cmmlu-security_studies_5shot_acc": 77.14,
            "cmmlu-us_foreign_policy_5shot_acc": 91.0,
            "cmmlu-professional_medicine_5shot_acc": 75.37,
            "cmmlu-professional_accounting_5shot_acc": 58.51,
            "cmmlu-professional_psychology_5shot_acc": 72.22,
            "cmmlu_fullavg_5shot_acc_norm": 73.88,
            "cmmlu-virology_5shot_acc_norm": 52.41,
            "cmmlu-marketing_5shot_acc_norm": 89.74,
            "cmmlu-nutrition_5shot_acc_norm": 83.66,
            "cmmlu-sociology_5shot_acc_norm": 88.06,
            "cmmlu-management_5shot_acc_norm": 77.67,
            "cmmlu-philosophy_5shot_acc_norm": 75.24,
            "cmmlu-prehistory_5shot_acc_norm": 77.16,
            "cmmlu-miscellaneous_5shot_acc_norm": 81.86,
            "cmmlu-moral_disputes_5shot_acc_norm": 76.01,
            "cmmlu-moral_scenarios_5shot_acc_norm": 64.58,
            "cmmlu-world_religions_5shot_acc_norm": 77.78,
            "cmmlu-machine_learning_5shot_acc_norm": 63.39,
            "cmmlu-medical_genetics_5shot_acc_norm": 73.0,
            "cmmlu-professional_law_5shot_acc_norm": 55.61,
            "cmmlu-public_relations_5shot_acc_norm": 67.27,
            "cmmlu-security_studies_5shot_acc_norm": 77.14,
            "cmmlu-us_foreign_policy_5shot_acc_norm": 91.0,
            "cmmlu-professional_medicine_5shot_acc_norm": 75.37,
            "cmmlu-professional_accounting_5shot_acc_norm": 58.51,
            "cmmlu-professional_psychology_5shot_acc_norm": 72.22
        }
    },
    "versions": {},
    "config_tasks": {},
    "summary_tasks": {},
    "summary_general": {}
}