BEIR retrieval (nDCG@10 and Recall@k; values rounded to 5 decimals):

| Dataset | nDCG@10 | Recall@10 | Recall@20 | Recall@100 |
|---|---|---|---|---|
| arguana | 0.38885 | – | – | 0.93741 |
| climate-fever | 0.15352 | 0.19198 | 0.24603 | 0.41676 |
| cqadupstack | 0.27401 | – | – | 0.60105 |
| dbpedia-entity | 0.29868 | 0.19285 | 0.25779 | 0.44014 |
| fever | 0.57055 | 0.75716 | 0.81739 | 0.90338 |
| fiqa | 0.23933 | – | – | 0.56617 |
| hotpotqa | 0.50295 | 0.53862 | 0.58913 | 0.70223 |
| msmarco | 0.20719 | 0.35679 | 0.46104 | 0.67851 |
| nfcorpus | 0.31333 | – | – | 0.29552 |
| nq | 0.27242 | – | – | 0.79193 |
| quora | 0.77732 | – | – | 0.97812 |
| scidocs | 0.14980 | – | – | 0.36587 |
| scifact | 0.66223 | – | – | 0.90156 |
| trec-covid | 0.58943 | – | – | 0.10692 |
| webis-touche2020 | 0.18989 | – | – | 0.45315 |
| avg | 0.34658 | 0.40748 | 0.47428 | 0.62820 |
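The avg row matches a macro-average over the five datasets that report all four cutoffs (climate-fever, dbpedia-entity, fever, hotpotqa, msmarco), not over all fifteen corpora; this is inferred from the numbers rather than stated in the log. A minimal Python sketch recomputing two of the aggregates from the table above:

```python
# Recompute the "avg" row from the per-dataset BEIR numbers above.
# Assumption (inferred from the values, not stated in the log): the average
# covers only the five datasets that report Recall@10/@20/@100 as well as nDCG@10.
ndcg_at_10 = {
    "climate-fever": 0.15352,
    "dbpedia-entity": 0.29868,
    "fever": 0.57055,
    "hotpotqa": 0.50295,
    "msmarco": 0.20719,
}
recall_at_10 = {
    "climate-fever": 0.19198,
    "dbpedia-entity": 0.19285,
    "fever": 0.75716,
    "hotpotqa": 0.53862,
    "msmarco": 0.35679,
}

def mean(values):
    return sum(values) / len(values)

print(f"avg nDCG@10   = {mean(ndcg_at_10.values()):.5f}")   # 0.34658
print(f"avg Recall@10 = {mean(recall_at_10.values()):.5f}")  # 0.40748
```

The same five-dataset subset also reproduces the Recall@20 (0.47428) and Recall@100 (0.62820) averages.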
Open-domain QA retrieval accuracy (top-k hit rate, test sets):

| Dataset | Acc@5 | Acc@20 | Acc@100 |
|---|---|---|---|
| curatedtrec | 0.64121 | 0.84150 | 0.92795 |
| entityqs (macro-averaged) | 0.49925 | 0.63908 | 0.75899 |
| nq | 0.44765 | 0.65180 | 0.80111 |
| squad1 | 0.41911 | 0.61202 | 0.77010 |
| trivia | 0.61337 | 0.74861 | 0.83329 |
| webq | 0.45571 | 0.66831 | 0.81053 |
SentEval STS tasks (correlation):

| Task | Score |
|---|---|
| SICKRelatedness | 0.69095 |
| STS12 | 0.59155 |
| STS13 | 0.73928 |
| STS14 | 0.67080 |
| STS15 | 0.79030 |
| STS16 | 0.79062 |
| STSBenchmark | 0.75347 |
| avg_sts_7 | 0.71814 |

SentEval transfer tasks (accuracy, %):

| Task | Score |
|---|---|
| CR | 87.12 |
| MPQA | 89.04 |
| MR | 81.19 |
| MRPC | 71.69 |
| SST2 | 85.21 |
| SUBJ | 95.61 |
| TREC | 80.69 |
| avg_transfer | 84.36 |
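The two aggregate rows appear to be plain means of the seven individual scores in each group; a minimal sketch using the rounded table values (the raw log carries more decimals) reproduces them:

```python
# Reproduce the SentEval aggregate rows from the individual task scores above.
# STS group: SICKRelatedness, STS12-STS16, STSBenchmark (table order).
sts_7 = [0.69095, 0.59155, 0.73928, 0.67080, 0.79030, 0.79062, 0.75347]
# Transfer group: CR, MPQA, MR, MRPC, SST2, SUBJ, TREC (accuracy, %).
transfer_7 = [87.12, 89.04, 81.19, 71.69, 85.21, 95.61, 80.69]

print(f"avg_sts_7    = {sum(sts_7) / len(sts_7):.5f}")            # 0.71814
print(f"avg_transfer = {sum(transfer_7) / len(transfer_7):.2f}")  # 84.36
```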
train/global_step = 100000