eval_results/AIR-Bench_24.04/(LlamaIndex)Ada002+BM25+QueryFusionRetriever/NoReranker/results_20240521184012-c203e3788149c1fcb7fda31bd92f9523.json
[
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "ndcg_at_1",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.3773
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.37685
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.47619
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.43373
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "ndcg_at_3",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.42982
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.45939
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.46198
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.55455
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "ndcg_at_5",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.46897
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.50069
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.48747
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.58636
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "ndcg_at_10",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.51928
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.54175
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.53722
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.60966
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "ndcg_at_50",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.57904
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.59033
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.59543
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.64013
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "ndcg_at_100",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.59125
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.59892
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.6084
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.64924
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "ndcg_at_1000",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.59823
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.60573
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.62381
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.65199
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "map_at_1",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.24921
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.31949
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.22908
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.43373
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "map_at_3",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.36211
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.41609
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.36551
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.52544
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "map_at_5",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.39853
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.44315
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.40707
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.54331
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "map_at_10",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.431
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.46353
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.4449
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.55288
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "map_at_50",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.45109
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.47709
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.4677
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.55954
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "map_at_100",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.45273
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.47807
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.46958
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.56033
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "map_at_1000",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.45313
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.4786
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.47051
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.56046
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "recall_at_1",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.24921
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.31949
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.22908
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.43373
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "recall_at_3",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.45169
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.52028
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.42672
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.63855
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "recall_at_5",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.55009
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.61053
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.52044
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.71486
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "recall_at_10",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.67371
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.72181
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.64453
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.78715
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "recall_at_50",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.88732
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.9095
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.83635
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.92369
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "recall_at_100",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.9521
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.95821
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.90021
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.97992
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "recall_at_1000",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 1.0
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 1.0
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 1.0
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 1.0
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "precision_at_1",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.3773
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.37685
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.47619
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.43373
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "precision_at_3",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.26176
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.22354
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.33147
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.21285
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "precision_at_5",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.20184
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.16499
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.25434
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.14297
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "precision_at_10",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.13037
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.10089
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.16779
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.07871
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "precision_at_50",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.03521
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.02653
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.04465
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.01847
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "precision_at_100",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.01874
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.01398
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.02375
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.0098
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "precision_at_1000",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.00195
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.00147
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.00259
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.001
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "mrr_at_1",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.3773
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.37685
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.47619
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.43373
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "mrr_at_3",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.48824
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.47527
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.55836
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.52544
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "mrr_at_5",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.50634
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.4959
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.5718
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.54331
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "mrr_at_10",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.51538
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.50917
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.58399
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.55288
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "mrr_at_50",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.52362
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.51699
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.58928
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.55954
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "mrr_at_100",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.52409
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.51746
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.58973
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.56033
            }
        ]
    },
    {
        "config": {
            "retrieval_model": "(LlamaIndex)Ada002+BM25+QueryFusionRetriever",
            "retrieval_model_link": "https://huggingface.co/cheesyFishes/llamaindex_ada002_bm25_fusion",
            "reranking_model": "NoReranker",
            "reranking_model_link": null,
            "task": "long-doc",
            "metric": "mrr_at_1000",
            "timestamp": "2024-05-21T18:40:12Z",
            "is_anonymous": false,
            "revision": "c203e3788149c1fcb7fda31bd92f9523"
        },
        "results": [
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llama2",
                "value": 0.52427
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gpt3",
                "value": 0.51759
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "llm-survey",
                "value": 0.59
            },
            {
                "domain": "arxiv",
                "lang": "en",
                "dataset": "gemini",
                "value": 0.56046
            }
        ]
    }
]