{
    "config_general": {
        "model_name": "abacusai/Smaug-Llama-3-70B-Instruct",
        "model_dtype": "float16",
        "model_size": 0
    },
    "results": {
        "harness-c_arc_challenge": {
            "acc_norm": 58.36,
            "acc_stderr": 0,
            "c_arc_challenge_25shot_acc": 54.1,
            "c_arc_challenge_25shot_acc_norm": 58.36
        },
        "harness-c_gsm8k": {
            "acc": 70.05,
            "acc_stderr": 0,
            "c_gsm8k_5shot_acc": 70.05
        },
        "harness-c_hellaswag": {
            "acc_norm": 64.73,
            "acc_stderr": 0,
            "c_hellaswag_10shot_acc": 47.24,
            "c_hellaswag_10shot_acc_norm": 64.73
        },
        "harness-c-sem-v2": {
            "acc": 87.07750000000001,
            "acc_stderr": 0,
            "c_sem_v2-LLSRC_5shot_acc": 90.79,
            "c_sem_v2-SLPWC_5shot_acc": 84.43,
            "c_sem_v2-SLRFC_5shot_acc": 89.06,
            "c_sem_v2-SLSRC_5shot_acc": 84.03,
            "c_sem_v2-LLSRC_5shot_acc_norm": 90.79,
            "c_sem_v2-SLPWC_5shot_acc_norm": 84.43,
            "c_sem_v2-SLRFC_5shot_acc_norm": 89.06,
            "c_sem_v2-SLSRC_5shot_acc_norm": 84.03
        },
        "harness-c_truthfulqa_mc": {
            "mc2": 53.91,
            "acc_stderr": 0,
            "c_truthfulqa_mc_0shot_mc1": 31.21,
            "c_truthfulqa_mc_0shot_mc2": 53.91
        },
        "harness-c_winogrande": {
            "acc": 62.12,
            "acc_stderr": 0,
            "c_winogrande_0shot_acc": 62.12
        },
        "harness-cmmlu": {
            "acc_norm": 80.7,
            "acc_stderr": 0,
            "cmmlu_fullavg_5shot_acc": 80.7,
            "cmmlu-world_religions_5shot_acc": 80.7,
            "cmmlu_fullavg_5shot_acc_norm": 80.7,
            "cmmlu-world_religions_5shot_acc_norm": 80.7
        }
    },
    "versions": {},
    "config_tasks": {},
    "summary_tasks": {},
    "summary_general": {}
}