Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
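For context, a minimal sketch of how a commit like this is typically produced, assuming huggingface_hub >= 0.25 (which ships the upload-large-folder tool named in the commit title); the repo id and local path below are placeholders, not values taken from this diff:

    # Hedged sketch: reproduces an "upload-large-folder" commit like the one above.
    # Assumes huggingface_hub >= 0.25; "user/repo" and "local-folder" are placeholders.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_large_folder(
        repo_id="user/repo",         # placeholder: the target repository
        folder_path="local-folder",  # placeholder: folder containing scripts/yans/...
        repo_type="dataset",         # assumption: the repo type must be given explicitly
    )

The equivalent CLI call would be `huggingface-cli upload-large-folder user/repo local-folder --repo-type=dataset`; the tool resumes interrupted uploads, which is why it is used for large folders like this one.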
- scripts/yans/lm-evaluation-harness/tests/testdata/ai2_arc_10_hf_pretrained-EleutherAI-pythia-14m-dtype-float32-device-cpu.txt +6 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/anagrams1-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/anli_r3-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_anaphor_number_agreement-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_anaphor_number_agreement-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_causative-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_causative-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_1-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_2-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_2-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_1-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adjective_1-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_existential_there_object_raising-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_irregular_plural_subject_verb_agreement_2-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_passive_2-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_sentential_negation_npi_licensor_present-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_island-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_questions_subject_gap_long_distance-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/cb-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/cb-v1-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_disability-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_physical_appearance-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/cycle_letters-v0-greedy_until +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/ethics_utilitarianism_original-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/ethics_utilitarianism_original-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-business_ethics-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-business_ethics-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-college_biology-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-college_computer_science-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-college_physics-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-computer_security-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-elementary_mathematics-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-international_law-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-miscellaneous-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-moral_scenarios-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-nutrition-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-philosophy-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-professional_law-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-public_relations-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-us_foreign_policy-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-virology-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-world_religions-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/iwslt17-ar-en-v0-greedy_until +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/iwslt17-en-ar-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/lambada_mt_es-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/lambada_openai-v2.0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/lambada_openai-v2.0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/lambada_openai_mt_en-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/logiqa-v0-res.json +1 -0
scripts/yans/lm-evaluation-harness/tests/testdata/ai2_arc_10_hf_pretrained-EleutherAI-pythia-14m-dtype-float32-device-cpu.txt
ADDED
@@ -0,0 +1,6 @@
+| Tasks |Version|Filter|n-shot| Metric | |Value| |Stderr|
+|-------------|------:|------|-----:|--------|---|----:|---|------|
+|arc_challenge| 1|none | 0|acc |↑ | 0.0|± | N/A|
+| | |none | 0|acc_norm|↑ | 0.0|± | N/A|
+|arc_easy | 1|none | 0|acc |↑ | 0.3|± | N/A|
+| | |none | 0|acc_norm|↑ | 0.1|± | N/A|
scripts/yans/lm-evaluation-harness/tests/testdata/anagrams1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"anagrams1": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"anagrams1": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/anli_r3-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"anli_r3": {"acc": 0.31916666666666665, "acc_stderr": 0.01346230971200514}}, "versions": {"anli_r3": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_anaphor_number_agreement-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+0bdad31c974ba064e1f1ba931841ec2ba7461e8b0ca54ea5f79f08b6bae0bab5
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_anaphor_number_agreement-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_anaphor_number_agreement": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_anaphor_number_agreement": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_causative-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+3d67ad025185dbb0808ebd7f508edcb5750c18fc3c01ad91f20fda80780c916c
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_causative-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_causative": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_causative": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+2df8cc7f17089f7e8c7d974dcb324c809d30ef059a5be22aed6b69f44230809f
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_2-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+95acb74fac7d57ae2c9d208361a5f8ad36b0b19a055f02e648ed8e99505f4b43
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_with_adj_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_with_adj_2": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adj_irregular_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ad61c619aa79433d02f1aeacde2ab87291fd5d5c370032c24d41c4f0065ed1f9
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_with_adjective_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+007c47e5fbf88119c5180feef75e1345d448e56adcd4c7ab2d52fb8d67350d34
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_existential_there_object_raising-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+63567712076256f373131971676c1c6d711efef73cd0e4de3cc639bc631a2413
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_irregular_plural_subject_verb_agreement_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_irregular_plural_subject_verb_agreement_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_irregular_plural_subject_verb_agreement_2": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_passive_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_passive_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_passive_2": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_sentential_negation_npi_licensor_present-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_sentential_negation_npi_licensor_present": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_sentential_negation_npi_licensor_present": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_island-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+91a9e4b60b0f3572a7fdbd7648d0e69f36e5eb34db715315b0082558d7ed8b65
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_questions_subject_gap_long_distance-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_wh_questions_subject_gap_long_distance": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_wh_questions_subject_gap_long_distance": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/cb-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"cb": {"acc": 0.3392857142857143, "acc_stderr": 0.06384226561930825, "f1": 0.2819143819143819}}, "versions": {"cb": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/cb-v1-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"cb": {"acc": 0.3392857142857143, "acc_stderr": 0.06384226561930825, "f1": 0.2819143819143819}}, "versions": {"cb": 1}}
scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ee3ce1ddb8071d4189e5b06e7f3c618a434221ac52935d0f434c4d183f01458a
scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_disability-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_english_disability": {"likelihood_difference": 0.3148684792547637, "likelihood_difference_stderr": 0.02800803147051987, "pct_stereotype": 0.36923076923076925, "pct_stereotype_stderr": 0.06032456592830047}}, "versions": {"crows_pairs_english_disability": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_physical_appearance-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d1823f5038afafa7a5338e42531720480c8ccf4e177789526caf294d52d56e89
scripts/yans/lm-evaluation-harness/tests/testdata/cycle_letters-v0-greedy_until
ADDED
@@ -0,0 +1 @@
+eb23f7d5de7528eefd8ed5f8054c402ff947319cccfef7195995946f99389201
scripts/yans/lm-evaluation-harness/tests/testdata/ethics_utilitarianism_original-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+5b42ba1faf5ece6a6ec9a3976ce79c1fac8df5b98272aab85457188c2142693c
scripts/yans/lm-evaluation-harness/tests/testdata/ethics_utilitarianism_original-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"ethics_utilitarianism_original": {"acc": 0.5214226289517471, "acc_stderr": 0.007204999520618661}}, "versions": {"ethics_utilitarianism_original": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-business_ethics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+b3b27e9dbad587377d3c8cab1072782de883e245da93a563bd8b3099017b1fc0
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-business_ethics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-business_ethics": {"acc": 0.29, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394, "acc_stderr": 0.045604802157206845}}, "versions": {"hendrycksTest-business_ethics": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-college_biology-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+c29e4e67ff91af29b9434884874414d1b1b32ccc32903c6b1639469b19907419
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-college_computer_science-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+4ea26ad780290429ac5a3317559c154848d662bd40532c966458ba6f2a32d0a3
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-college_physics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+704a7671ef981fb95594782bc446dd632e87ebdbe89436a0603b714fb5786c75
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-computer_security-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-computer_security": {"acc": 0.24, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394, "acc_stderr": 0.042923469599092816}}, "versions": {"hendrycksTest-computer_security": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-elementary_mathematics-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-elementary_mathematics": {"acc": 0.2724867724867725, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194978, "acc_stderr": 0.022930973071633345}}, "versions": {"hendrycksTest-elementary_mathematics": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-international_law-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ea9b2cefd27959db564168f6ad1169a5eaa012fc5a5d5b8faf9e34d94e335dc1
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-miscellaneous-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+972dd88dbbaf09d14766e243cfc233425e7c01a26dbc61bdb9eeefa788822331
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-moral_scenarios-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+a8e1882e77728b53c8b86312254d08320d8363fb606d746a8dd145b812f62cf5
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-nutrition-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-nutrition": {"acc": 0.24509803921568626, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.025738854797818723, "acc_stderr": 0.02463004897982476}}, "versions": {"hendrycksTest-nutrition": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-philosophy-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-philosophy": {"acc": 0.26366559485530544, "acc_norm": 0.2733118971061093, "acc_norm_stderr": 0.02531176597542612, "acc_stderr": 0.02502553850053234}}, "versions": {"hendrycksTest-philosophy": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-professional_law-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+c38c9d5d84eeb7a5f3c4a34d6e70d7e15847b3c38f26e4b119c982bb935e118f
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-public_relations-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-public_relations": {"acc": 0.3090909090909091, "acc_norm": 0.2636363636363636, "acc_norm_stderr": 0.04220224692971987, "acc_stderr": 0.044262946482000985}}, "versions": {"hendrycksTest-public_relations": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-us_foreign_policy-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-us_foreign_policy": {"acc": 0.2, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283, "acc_stderr": 0.040201512610368445}}, "versions": {"hendrycksTest-us_foreign_policy": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-virology-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-virology": {"acc": 0.27710843373493976, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553027, "acc_stderr": 0.034843315926805875}}, "versions": {"hendrycksTest-virology": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-world_religions-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-world_religions": {"acc": 0.21637426900584794, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03218093795602357, "acc_stderr": 0.03158149539338734}}, "versions": {"hendrycksTest-world_religions": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/iwslt17-ar-en-v0-greedy_until
ADDED
@@ -0,0 +1 @@
+e94d310de91fad7ce36f4cf3305552020221482c5588f2efcefaa019893504f1
scripts/yans/lm-evaluation-harness/tests/testdata/iwslt17-en-ar-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"iwslt17-en-ar": {"bleu": 0.0, "bleu_stderr": 0.0, "chrf": 0.0, "chrf_stderr": 0.0, "ter": 1.0, "ter_stderr": 0.0}}, "versions": {"iwslt17-en-ar": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/lambada_mt_es-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_mt_es": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_mt_es": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/lambada_openai-v2.0-loglikelihood
ADDED
@@ -0,0 +1 @@
+9ca5643bbaafed2f027eab5b68cc438e9e268f6df9a678e956e61726a985cf0b
scripts/yans/lm-evaluation-harness/tests/testdata/lambada_openai-v2.0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_openai": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_openai": "2.0"}}
scripts/yans/lm-evaluation-harness/tests/testdata/lambada_openai_mt_en-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_openai_mt_en": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_openai_mt_en": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/logiqa-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"logiqa": {"acc": 0.25806451612903225, "acc_norm": 0.2764976958525346, "acc_norm_stderr": 0.017543209075825194, "acc_stderr": 0.017162894755127077}}, "versions": {"logiqa": 0}}