jacobmorrison commited on
Commit
4e516e8
1 Parent(s): 40c69f7

Upload 15 files

Browse files
all_results.json ADDED
@@ -0,0 +1,548 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 2.0,
3
+ "eval_global_step": 17980,
4
+ "eval_runtime": 0.0055,
5
+ "eval_samples": 0,
6
+ "eval_samples_per_second": 0.0,
7
+ "eval_steps_per_second": 0.0,
8
+ "predict_exact_match": 34.1176,
9
+ "predict_exact_match_for_answerability_classification": 67.6923,
10
+ "predict_exact_match_for_cause_effect_classification": 51.4286,
11
+ "predict_exact_match_for_coreference_resolution": 45.7143,
12
+ "predict_exact_match_for_data_to_text": 0.0,
13
+ "predict_exact_match_for_dialogue_act_recognition": 51.4286,
14
+ "predict_exact_match_for_grammar_error_correction": 20.0,
15
+ "predict_exact_match_for_keyword_tagging": 44.0,
16
+ "predict_exact_match_for_overlap_extraction": 10.0,
17
+ "predict_exact_match_for_question_rewriting": 0.0,
18
+ "predict_exact_match_for_task020_mctaco_span_based_question": 40.0,
19
+ "predict_exact_match_for_task033_winogrande_answer_generation": 40.0,
20
+ "predict_exact_match_for_task034_winogrande_question_modification_object": 0.0,
21
+ "predict_exact_match_for_task035_winogrande_question_modification_person": 0.0,
22
+ "predict_exact_match_for_task036_qasc_topic_word_to_generate_related_fact": 40.0,
23
+ "predict_exact_match_for_task039_qasc_find_overlapping_words": 20.0,
24
+ "predict_exact_match_for_task050_multirc_answerability": 100.0,
25
+ "predict_exact_match_for_task102_commongen_sentence_generation": 0.0,
26
+ "predict_exact_match_for_task1152_bard_analogical_reasoning_causation": 20.0,
27
+ "predict_exact_match_for_task1153_bard_analogical_reasoning_affordance": 20.0,
28
+ "predict_exact_match_for_task1154_bard_analogical_reasoning_travel": 40.0,
29
+ "predict_exact_match_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
30
+ "predict_exact_match_for_task1156_bard_analogical_reasoning_tools": 40.0,
31
+ "predict_exact_match_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
32
+ "predict_exact_match_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
33
+ "predict_exact_match_for_task1159_bard_analogical_reasoning_containers": 60.0,
34
+ "predict_exact_match_for_task1161_coda19_title_generation": 0.0,
35
+ "predict_exact_match_for_task1195_disflqa_disfluent_to_fluent_conversion": 0.0,
36
+ "predict_exact_match_for_task121_zest_text_modification": 0.0,
37
+ "predict_exact_match_for_task133_winowhy_reason_plausibility_detection": 20.0,
38
+ "predict_exact_match_for_task1342_amazon_us_reviews_title": 0.0,
39
+ "predict_exact_match_for_task1344_glue_entailment_classification": 40.0,
40
+ "predict_exact_match_for_task1345_glue_qqp_question_paraprashing": 0.0,
41
+ "predict_exact_match_for_task1356_xlsum_title_generation": 0.0,
42
+ "predict_exact_match_for_task1358_xlsum_title_generation": 0.0,
43
+ "predict_exact_match_for_task1385_anli_r1_entailment": 0.0,
44
+ "predict_exact_match_for_task1386_anli_r2_entailment": 0.0,
45
+ "predict_exact_match_for_task1387_anli_r3_entailment": 0.0,
46
+ "predict_exact_match_for_task1388_cb_entailment": 0.0,
47
+ "predict_exact_match_for_task1390_wscfixed_coreference": 60.0,
48
+ "predict_exact_match_for_task1391_winogrande_easy_answer_generation": 100.0,
49
+ "predict_exact_match_for_task1393_superglue_copa_text_completion": 100.0,
50
+ "predict_exact_match_for_task1394_meta_woz_task_classification": 80.0,
51
+ "predict_exact_match_for_task1407_dart_question_generation": 0.0,
52
+ "predict_exact_match_for_task1409_dart_text_generation": 0.0,
53
+ "predict_exact_match_for_task1439_doqa_cooking_isanswerable": 40.0,
54
+ "predict_exact_match_for_task1442_doqa_movies_isanswerable": 80.0,
55
+ "predict_exact_match_for_task1516_imppres_naturallanguageinference": 20.0,
56
+ "predict_exact_match_for_task1529_scitail1.1_classification": 40.0,
57
+ "predict_exact_match_for_task1531_daily_dialog_type_classification": 40.0,
58
+ "predict_exact_match_for_task1533_daily_dialog_formal_classification": 60.0,
59
+ "predict_exact_match_for_task1534_daily_dialog_question_classification": 40.0,
60
+ "predict_exact_match_for_task1540_parsed_pdfs_summarization": 0.0,
61
+ "predict_exact_match_for_task1554_scitail_classification": 40.0,
62
+ "predict_exact_match_for_task1557_jfleg_answer_generation": 20.0,
63
+ "predict_exact_match_for_task1562_zest_text_modification": 0.0,
64
+ "predict_exact_match_for_task1586_scifact_title_generation": 0.0,
65
+ "predict_exact_match_for_task1598_nyc_long_text_generation": 0.0,
66
+ "predict_exact_match_for_task1612_sick_label_classification": 40.0,
67
+ "predict_exact_match_for_task1615_sick_tclassify_b_relation_a": 60.0,
68
+ "predict_exact_match_for_task1622_disfl_qa_text_modication": 0.0,
69
+ "predict_exact_match_for_task1624_disfl_qa_question_yesno_classification": 60.0,
70
+ "predict_exact_match_for_task1631_openpi_answer_generation": 0.0,
71
+ "predict_exact_match_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
72
+ "predict_exact_match_for_task1659_title_generation": 0.0,
73
+ "predict_exact_match_for_task1664_winobias_text_generation": 60.0,
74
+ "predict_exact_match_for_task1728_web_nlg_data_to_text": 0.0,
75
+ "predict_exact_match_for_task190_snli_classification": 40.0,
76
+ "predict_exact_match_for_task199_mnli_classification": 100.0,
77
+ "predict_exact_match_for_task200_mnli_entailment_classification": 80.0,
78
+ "predict_exact_match_for_task201_mnli_neutral_classification": 0.0,
79
+ "predict_exact_match_for_task202_mnli_contradiction_classification": 80.0,
80
+ "predict_exact_match_for_task219_rocstories_title_answer_generation": 0.0,
81
+ "predict_exact_match_for_task220_rocstories_title_classification": 100.0,
82
+ "predict_exact_match_for_task226_english_language_answer_relevance_classification": 40.0,
83
+ "predict_exact_match_for_task232_iirc_link_number_classification": 60.0,
84
+ "predict_exact_match_for_task233_iirc_link_exists_classification": 40.0,
85
+ "predict_exact_match_for_task242_tweetqa_classification": 80.0,
86
+ "predict_exact_match_for_task249_enhanced_wsc_pronoun_disambiguation": 20.0,
87
+ "predict_exact_match_for_task281_points_of_correspondence": 0.0,
88
+ "predict_exact_match_for_task288_gigaword_summarization": 0.0,
89
+ "predict_exact_match_for_task290_tellmewhy_question_answerability": 60.0,
90
+ "predict_exact_match_for_task304_numeric_fused_head_resolution": 0.0,
91
+ "predict_exact_match_for_task329_gap_classification": 60.0,
92
+ "predict_exact_match_for_task330_gap_answer_generation": 80.0,
93
+ "predict_exact_match_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
94
+ "predict_exact_match_for_task362_spolin_yesand_prompt_response_sub_classification": 20.0,
95
+ "predict_exact_match_for_task391_causal_relationship": 40.0,
96
+ "predict_exact_match_for_task392_inverse_causal_relationship": 80.0,
97
+ "predict_exact_match_for_task393_plausible_result_generation": 0.0,
98
+ "predict_exact_match_for_task401_numeric_fused_head_reference": 20.0,
99
+ "predict_exact_match_for_task402_grailqa_paraphrase_generation": 0.0,
100
+ "predict_exact_match_for_task418_persent_title_generation": 0.0,
101
+ "predict_exact_match_for_task442_com_qa_paraphrase_question_generation": 0.0,
102
+ "predict_exact_match_for_task500_scruples_anecdotes_title_generation": 0.0,
103
+ "predict_exact_match_for_task510_reddit_tifu_title_summarization": 0.0,
104
+ "predict_exact_match_for_task520_aquamuse_answer_given_in_passage": 100.0,
105
+ "predict_exact_match_for_task569_recipe_nlg_text_generation": 0.0,
106
+ "predict_exact_match_for_task602_wikitext-103_answer_generation": 0.0,
107
+ "predict_exact_match_for_task613_politifact_text_generation": 0.0,
108
+ "predict_exact_match_for_task614_glucose_cause_event_detection": 0.0,
109
+ "predict_exact_match_for_task619_ohsumed_abstract_title_generation": 0.0,
110
+ "predict_exact_match_for_task620_ohsumed_medical_subject_headings_answer_generation": 0.0,
111
+ "predict_exact_match_for_task623_ohsumed_yes_no_answer_generation": 80.0,
112
+ "predict_exact_match_for_task640_esnli_classification": 40.0,
113
+ "predict_exact_match_for_task641_esnli_classification": 60.0,
114
+ "predict_exact_match_for_task642_esnli_classification": 40.0,
115
+ "predict_exact_match_for_task645_summarization": 100.0,
116
+ "predict_exact_match_for_task648_answer_generation": 0.0,
117
+ "predict_exact_match_for_task670_ambigqa_question_generation": 0.0,
118
+ "predict_exact_match_for_task671_ambigqa_text_generation": 0.0,
119
+ "predict_exact_match_for_task677_ollie_sentence_answer_generation": 0.0,
120
+ "predict_exact_match_for_task738_perspectrum_classification": 60.0,
121
+ "predict_exact_match_for_task743_eurlex_summarization": 0.0,
122
+ "predict_exact_match_for_task760_msr_sqa_long_text_generation": 0.0,
123
+ "predict_exact_match_for_task769_qed_summarization": 100.0,
124
+ "predict_exact_match_for_task827_copa_commonsense_reasoning": 80.0,
125
+ "predict_exact_match_for_task828_copa_commonsense_cause_effect": 60.0,
126
+ "predict_exact_match_for_task879_schema_guided_dstc8_classification": 100.0,
127
+ "predict_exact_match_for_task880_schema_guided_dstc8_classification": 20.0,
128
+ "predict_exact_match_for_task890_gcwd_classification": 40.0,
129
+ "predict_exact_match_for_task891_gap_coreference_resolution": 40.0,
130
+ "predict_exact_match_for_task892_gap_reverse_coreference_resolution": 40.0,
131
+ "predict_exact_match_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
132
+ "predict_exact_match_for_task935_defeasible_nli_atomic_classification": 40.0,
133
+ "predict_exact_match_for_task936_defeasible_nli_snli_classification": 40.0,
134
+ "predict_exact_match_for_task937_defeasible_nli_social_classification": 20.0,
135
+ "predict_exact_match_for_task957_e2e_nlg_text_generation_generate": 0.0,
136
+ "predict_exact_match_for_task970_sherliic_causal_relationship": 100.0,
137
+ "predict_exact_match_for_textual_entailment": 40.8333,
138
+ "predict_exact_match_for_title_generation": 11.1111,
139
+ "predict_exact_match_for_word_analogy": 47.5,
140
+ "predict_f1": 48.7192,
141
+ "predict_f1_for_answerability_classification": 69.7436,
142
+ "predict_f1_for_cause_effect_classification": 67.751,
143
+ "predict_f1_for_coreference_resolution": 56.8571,
144
+ "predict_f1_for_data_to_text": 37.7596,
145
+ "predict_f1_for_dialogue_act_recognition": 54.2857,
146
+ "predict_f1_for_grammar_error_correction": 71.7726,
147
+ "predict_f1_for_keyword_tagging": 60.1333,
148
+ "predict_f1_for_overlap_extraction": 23.8141,
149
+ "predict_f1_for_question_rewriting": 56.1871,
150
+ "predict_f1_for_task020_mctaco_span_based_question": 40.0,
151
+ "predict_f1_for_task033_winogrande_answer_generation": 66.6667,
152
+ "predict_f1_for_task034_winogrande_question_modification_object": 61.7974,
153
+ "predict_f1_for_task035_winogrande_question_modification_person": 58.9922,
154
+ "predict_f1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0,
155
+ "predict_f1_for_task039_qasc_find_overlapping_words": 20.0,
156
+ "predict_f1_for_task050_multirc_answerability": 100.0,
157
+ "predict_f1_for_task102_commongen_sentence_generation": 49.0256,
158
+ "predict_f1_for_task1152_bard_analogical_reasoning_causation": 20.0,
159
+ "predict_f1_for_task1153_bard_analogical_reasoning_affordance": 20.0,
160
+ "predict_f1_for_task1154_bard_analogical_reasoning_travel": 40.0,
161
+ "predict_f1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
162
+ "predict_f1_for_task1156_bard_analogical_reasoning_tools": 40.0,
163
+ "predict_f1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
164
+ "predict_f1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
165
+ "predict_f1_for_task1159_bard_analogical_reasoning_containers": 60.0,
166
+ "predict_f1_for_task1161_coda19_title_generation": 19.4872,
167
+ "predict_f1_for_task1195_disflqa_disfluent_to_fluent_conversion": 62.1678,
168
+ "predict_f1_for_task121_zest_text_modification": 42.083,
169
+ "predict_f1_for_task133_winowhy_reason_plausibility_detection": 20.0,
170
+ "predict_f1_for_task1342_amazon_us_reviews_title": 8.9697,
171
+ "predict_f1_for_task1344_glue_entailment_classification": 40.0,
172
+ "predict_f1_for_task1345_glue_qqp_question_paraprashing": 21.6667,
173
+ "predict_f1_for_task1356_xlsum_title_generation": 16.0,
174
+ "predict_f1_for_task1358_xlsum_title_generation": 27.1161,
175
+ "predict_f1_for_task1385_anli_r1_entailment": 0.0,
176
+ "predict_f1_for_task1386_anli_r2_entailment": 0.0,
177
+ "predict_f1_for_task1387_anli_r3_entailment": 0.0,
178
+ "predict_f1_for_task1388_cb_entailment": 0.0,
179
+ "predict_f1_for_task1390_wscfixed_coreference": 60.0,
180
+ "predict_f1_for_task1391_winogrande_easy_answer_generation": 100.0,
181
+ "predict_f1_for_task1393_superglue_copa_text_completion": 100.0,
182
+ "predict_f1_for_task1394_meta_woz_task_classification": 80.0,
183
+ "predict_f1_for_task1407_dart_question_generation": 23.7903,
184
+ "predict_f1_for_task1409_dart_text_generation": 39.9284,
185
+ "predict_f1_for_task1439_doqa_cooking_isanswerable": 40.0,
186
+ "predict_f1_for_task1442_doqa_movies_isanswerable": 80.0,
187
+ "predict_f1_for_task1516_imppres_naturallanguageinference": 20.0,
188
+ "predict_f1_for_task1529_scitail1.1_classification": 40.0,
189
+ "predict_f1_for_task1531_daily_dialog_type_classification": 40.0,
190
+ "predict_f1_for_task1533_daily_dialog_formal_classification": 60.0,
191
+ "predict_f1_for_task1534_daily_dialog_question_classification": 40.0,
192
+ "predict_f1_for_task1540_parsed_pdfs_summarization": 25.1948,
193
+ "predict_f1_for_task1554_scitail_classification": 40.0,
194
+ "predict_f1_for_task1557_jfleg_answer_generation": 71.7726,
195
+ "predict_f1_for_task1562_zest_text_modification": 45.6913,
196
+ "predict_f1_for_task1586_scifact_title_generation": 16.1509,
197
+ "predict_f1_for_task1598_nyc_long_text_generation": 27.3109,
198
+ "predict_f1_for_task1612_sick_label_classification": 40.0,
199
+ "predict_f1_for_task1615_sick_tclassify_b_relation_a": 60.0,
200
+ "predict_f1_for_task1622_disfl_qa_text_modication": 64.3711,
201
+ "predict_f1_for_task1624_disfl_qa_question_yesno_classification": 60.0,
202
+ "predict_f1_for_task1631_openpi_answer_generation": 77.0305,
203
+ "predict_f1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
204
+ "predict_f1_for_task1659_title_generation": 25.0379,
205
+ "predict_f1_for_task1664_winobias_text_generation": 80.0,
206
+ "predict_f1_for_task1728_web_nlg_data_to_text": 42.0333,
207
+ "predict_f1_for_task190_snli_classification": 40.0,
208
+ "predict_f1_for_task199_mnli_classification": 100.0,
209
+ "predict_f1_for_task200_mnli_entailment_classification": 80.0,
210
+ "predict_f1_for_task201_mnli_neutral_classification": 0.0,
211
+ "predict_f1_for_task202_mnli_contradiction_classification": 80.0,
212
+ "predict_f1_for_task219_rocstories_title_answer_generation": 15.7143,
213
+ "predict_f1_for_task220_rocstories_title_classification": 100.0,
214
+ "predict_f1_for_task226_english_language_answer_relevance_classification": 40.0,
215
+ "predict_f1_for_task232_iirc_link_number_classification": 60.0,
216
+ "predict_f1_for_task233_iirc_link_exists_classification": 40.0,
217
+ "predict_f1_for_task242_tweetqa_classification": 80.0,
218
+ "predict_f1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333,
219
+ "predict_f1_for_task281_points_of_correspondence": 27.6283,
220
+ "predict_f1_for_task288_gigaword_summarization": 10.3571,
221
+ "predict_f1_for_task290_tellmewhy_question_answerability": 86.6667,
222
+ "predict_f1_for_task304_numeric_fused_head_resolution": 13.3333,
223
+ "predict_f1_for_task329_gap_classification": 60.0,
224
+ "predict_f1_for_task330_gap_answer_generation": 96.0,
225
+ "predict_f1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
226
+ "predict_f1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0,
227
+ "predict_f1_for_task391_causal_relationship": 80.0,
228
+ "predict_f1_for_task392_inverse_causal_relationship": 93.3333,
229
+ "predict_f1_for_task393_plausible_result_generation": 38.1587,
230
+ "predict_f1_for_task401_numeric_fused_head_reference": 43.3333,
231
+ "predict_f1_for_task402_grailqa_paraphrase_generation": 58.1832,
232
+ "predict_f1_for_task418_persent_title_generation": 15.3458,
233
+ "predict_f1_for_task442_com_qa_paraphrase_question_generation": 64.9744,
234
+ "predict_f1_for_task500_scruples_anecdotes_title_generation": 3.0769,
235
+ "predict_f1_for_task510_reddit_tifu_title_summarization": 38.265,
236
+ "predict_f1_for_task520_aquamuse_answer_given_in_passage": 100.0,
237
+ "predict_f1_for_task569_recipe_nlg_text_generation": 20.6061,
238
+ "predict_f1_for_task602_wikitext-103_answer_generation": 0.0,
239
+ "predict_f1_for_task613_politifact_text_generation": 13.3333,
240
+ "predict_f1_for_task614_glucose_cause_event_detection": 22.7647,
241
+ "predict_f1_for_task619_ohsumed_abstract_title_generation": 34.2082,
242
+ "predict_f1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333,
243
+ "predict_f1_for_task623_ohsumed_yes_no_answer_generation": 80.0,
244
+ "predict_f1_for_task640_esnli_classification": 40.0,
245
+ "predict_f1_for_task641_esnli_classification": 60.0,
246
+ "predict_f1_for_task642_esnli_classification": 40.0,
247
+ "predict_f1_for_task645_summarization": 100.0,
248
+ "predict_f1_for_task648_answer_generation": 13.3333,
249
+ "predict_f1_for_task670_ambigqa_question_generation": 75.6778,
250
+ "predict_f1_for_task671_ambigqa_text_generation": 62.4536,
251
+ "predict_f1_for_task677_ollie_sentence_answer_generation": 27.0794,
252
+ "predict_f1_for_task738_perspectrum_classification": 60.0,
253
+ "predict_f1_for_task743_eurlex_summarization": 27.1719,
254
+ "predict_f1_for_task760_msr_sqa_long_text_generation": 0.7946,
255
+ "predict_f1_for_task769_qed_summarization": 100.0,
256
+ "predict_f1_for_task827_copa_commonsense_reasoning": 80.0,
257
+ "predict_f1_for_task828_copa_commonsense_cause_effect": 60.0,
258
+ "predict_f1_for_task879_schema_guided_dstc8_classification": 100.0,
259
+ "predict_f1_for_task880_schema_guided_dstc8_classification": 20.0,
260
+ "predict_f1_for_task890_gcwd_classification": 40.0,
261
+ "predict_f1_for_task891_gap_coreference_resolution": 50.0,
262
+ "predict_f1_for_task892_gap_reverse_coreference_resolution": 40.0,
263
+ "predict_f1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
264
+ "predict_f1_for_task935_defeasible_nli_atomic_classification": 40.0,
265
+ "predict_f1_for_task936_defeasible_nli_snli_classification": 40.0,
266
+ "predict_f1_for_task937_defeasible_nli_social_classification": 20.0,
267
+ "predict_f1_for_task957_e2e_nlg_text_generation_generate": 52.8436,
268
+ "predict_f1_for_task970_sherliic_causal_relationship": 100.0,
269
+ "predict_f1_for_textual_entailment": 40.8333,
270
+ "predict_f1_for_title_generation": 27.9279,
271
+ "predict_f1_for_word_analogy": 47.5,
272
+ "predict_gen_len": 4.8739,
273
+ "predict_global_step": 17980,
274
+ "predict_loss": 1.446231484413147,
275
+ "predict_rouge1": 51.3811,
276
+ "predict_rouge1_for_answerability_classification": 69.7436,
277
+ "predict_rouge1_for_cause_effect_classification": 67.4521,
278
+ "predict_rouge1_for_coreference_resolution": 56.9388,
279
+ "predict_rouge1_for_data_to_text": 39.3261,
280
+ "predict_rouge1_for_dialogue_act_recognition": 61.9048,
281
+ "predict_rouge1_for_grammar_error_correction": 73.7928,
282
+ "predict_rouge1_for_keyword_tagging": 64.1333,
283
+ "predict_rouge1_for_overlap_extraction": 23.0406,
284
+ "predict_rouge1_for_question_rewriting": 57.6828,
285
+ "predict_rouge1_for_task020_mctaco_span_based_question": 40.0,
286
+ "predict_rouge1_for_task033_winogrande_answer_generation": 66.6667,
287
+ "predict_rouge1_for_task034_winogrande_question_modification_object": 61.7974,
288
+ "predict_rouge1_for_task035_winogrande_question_modification_person": 58.9922,
289
+ "predict_rouge1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0,
290
+ "predict_rouge1_for_task039_qasc_find_overlapping_words": 20.0,
291
+ "predict_rouge1_for_task050_multirc_answerability": 100.0,
292
+ "predict_rouge1_for_task102_commongen_sentence_generation": 60.8462,
293
+ "predict_rouge1_for_task1152_bard_analogical_reasoning_causation": 20.0,
294
+ "predict_rouge1_for_task1153_bard_analogical_reasoning_affordance": 20.0,
295
+ "predict_rouge1_for_task1154_bard_analogical_reasoning_travel": 40.0,
296
+ "predict_rouge1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
297
+ "predict_rouge1_for_task1156_bard_analogical_reasoning_tools": 40.0,
298
+ "predict_rouge1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
299
+ "predict_rouge1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
300
+ "predict_rouge1_for_task1159_bard_analogical_reasoning_containers": 60.0,
301
+ "predict_rouge1_for_task1161_coda19_title_generation": 25.5618,
302
+ "predict_rouge1_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873,
303
+ "predict_rouge1_for_task121_zest_text_modification": 47.5745,
304
+ "predict_rouge1_for_task133_winowhy_reason_plausibility_detection": 20.0,
305
+ "predict_rouge1_for_task1342_amazon_us_reviews_title": 8.9697,
306
+ "predict_rouge1_for_task1344_glue_entailment_classification": 40.0,
307
+ "predict_rouge1_for_task1345_glue_qqp_question_paraprashing": 21.3333,
308
+ "predict_rouge1_for_task1356_xlsum_title_generation": 18.6667,
309
+ "predict_rouge1_for_task1358_xlsum_title_generation": 26.563,
310
+ "predict_rouge1_for_task1385_anli_r1_entailment": 20.0,
311
+ "predict_rouge1_for_task1386_anli_r2_entailment": 40.0,
312
+ "predict_rouge1_for_task1387_anli_r3_entailment": 20.0,
313
+ "predict_rouge1_for_task1388_cb_entailment": 40.0,
314
+ "predict_rouge1_for_task1390_wscfixed_coreference": 60.0,
315
+ "predict_rouge1_for_task1391_winogrande_easy_answer_generation": 100.0,
316
+ "predict_rouge1_for_task1393_superglue_copa_text_completion": 100.0,
317
+ "predict_rouge1_for_task1394_meta_woz_task_classification": 93.3333,
318
+ "predict_rouge1_for_task1407_dart_question_generation": 24.086,
319
+ "predict_rouge1_for_task1409_dart_text_generation": 41.9306,
320
+ "predict_rouge1_for_task1439_doqa_cooking_isanswerable": 40.0,
321
+ "predict_rouge1_for_task1442_doqa_movies_isanswerable": 80.0,
322
+ "predict_rouge1_for_task1516_imppres_naturallanguageinference": 20.0,
323
+ "predict_rouge1_for_task1529_scitail1.1_classification": 40.0,
324
+ "predict_rouge1_for_task1531_daily_dialog_type_classification": 40.0,
325
+ "predict_rouge1_for_task1533_daily_dialog_formal_classification": 60.0,
326
+ "predict_rouge1_for_task1534_daily_dialog_question_classification": 40.0,
327
+ "predict_rouge1_for_task1540_parsed_pdfs_summarization": 28.0519,
328
+ "predict_rouge1_for_task1554_scitail_classification": 40.0,
329
+ "predict_rouge1_for_task1557_jfleg_answer_generation": 73.7928,
330
+ "predict_rouge1_for_task1562_zest_text_modification": 47.5095,
331
+ "predict_rouge1_for_task1586_scifact_title_generation": 17.3747,
332
+ "predict_rouge1_for_task1598_nyc_long_text_generation": 27.0,
333
+ "predict_rouge1_for_task1612_sick_label_classification": 40.0,
334
+ "predict_rouge1_for_task1615_sick_tclassify_b_relation_a": 86.6667,
335
+ "predict_rouge1_for_task1622_disfl_qa_text_modication": 66.5018,
336
+ "predict_rouge1_for_task1624_disfl_qa_question_yesno_classification": 60.0,
337
+ "predict_rouge1_for_task1631_openpi_answer_generation": 77.0305,
338
+ "predict_rouge1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
339
+ "predict_rouge1_for_task1659_title_generation": 27.5046,
340
+ "predict_rouge1_for_task1664_winobias_text_generation": 80.0,
341
+ "predict_rouge1_for_task1728_web_nlg_data_to_text": 41.1615,
342
+ "predict_rouge1_for_task190_snli_classification": 40.0,
343
+ "predict_rouge1_for_task199_mnli_classification": 100.0,
344
+ "predict_rouge1_for_task200_mnli_entailment_classification": 80.0,
345
+ "predict_rouge1_for_task201_mnli_neutral_classification": 0.0,
346
+ "predict_rouge1_for_task202_mnli_contradiction_classification": 80.0,
347
+ "predict_rouge1_for_task219_rocstories_title_answer_generation": 20.7143,
348
+ "predict_rouge1_for_task220_rocstories_title_classification": 100.0,
349
+ "predict_rouge1_for_task226_english_language_answer_relevance_classification": 40.0,
350
+ "predict_rouge1_for_task232_iirc_link_number_classification": 60.0,
351
+ "predict_rouge1_for_task233_iirc_link_exists_classification": 40.0,
352
+ "predict_rouge1_for_task242_tweetqa_classification": 80.0,
353
+ "predict_rouge1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333,
354
+ "predict_rouge1_for_task281_points_of_correspondence": 26.0813,
355
+ "predict_rouge1_for_task288_gigaword_summarization": 23.8571,
356
+ "predict_rouge1_for_task290_tellmewhy_question_answerability": 86.6667,
357
+ "predict_rouge1_for_task304_numeric_fused_head_resolution": 13.3333,
358
+ "predict_rouge1_for_task329_gap_classification": 60.0,
359
+ "predict_rouge1_for_task330_gap_answer_generation": 97.1429,
360
+ "predict_rouge1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
361
+ "predict_rouge1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0,
362
+ "predict_rouge1_for_task391_causal_relationship": 80.0,
363
+ "predict_rouge1_for_task392_inverse_causal_relationship": 93.3333,
364
+ "predict_rouge1_for_task393_plausible_result_generation": 38.1587,
365
+ "predict_rouge1_for_task401_numeric_fused_head_reference": 43.3333,
366
+ "predict_rouge1_for_task402_grailqa_paraphrase_generation": 60.8498,
367
+ "predict_rouge1_for_task418_persent_title_generation": 15.3458,
368
+ "predict_rouge1_for_task442_com_qa_paraphrase_question_generation": 66.8333,
369
+ "predict_rouge1_for_task500_scruples_anecdotes_title_generation": 2.8571,
370
+ "predict_rouge1_for_task510_reddit_tifu_title_summarization": 39.811,
371
+ "predict_rouge1_for_task520_aquamuse_answer_given_in_passage": 100.0,
372
+ "predict_rouge1_for_task569_recipe_nlg_text_generation": 24.2424,
373
+ "predict_rouge1_for_task602_wikitext-103_answer_generation": 0.0,
374
+ "predict_rouge1_for_task613_politifact_text_generation": 33.3333,
375
+ "predict_rouge1_for_task614_glucose_cause_event_detection": 20.673,
376
+ "predict_rouge1_for_task619_ohsumed_abstract_title_generation": 38.4305,
377
+ "predict_rouge1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333,
378
+ "predict_rouge1_for_task623_ohsumed_yes_no_answer_generation": 80.0,
379
+ "predict_rouge1_for_task640_esnli_classification": 40.0,
380
+ "predict_rouge1_for_task641_esnli_classification": 60.0,
381
+ "predict_rouge1_for_task642_esnli_classification": 40.0,
382
+ "predict_rouge1_for_task645_summarization": 100.0,
383
+ "predict_rouge1_for_task648_answer_generation": 13.3333,
384
+ "predict_rouge1_for_task670_ambigqa_question_generation": 75.6778,
385
+ "predict_rouge1_for_task671_ambigqa_text_generation": 62.4536,
386
+ "predict_rouge1_for_task677_ollie_sentence_answer_generation": 28.5268,
387
+ "predict_rouge1_for_task738_perspectrum_classification": 60.0,
388
+ "predict_rouge1_for_task743_eurlex_summarization": 31.4423,
389
+ "predict_rouge1_for_task760_msr_sqa_long_text_generation": 0.8103,
390
+ "predict_rouge1_for_task769_qed_summarization": 100.0,
391
+ "predict_rouge1_for_task827_copa_commonsense_reasoning": 80.0,
392
+ "predict_rouge1_for_task828_copa_commonsense_cause_effect": 60.0,
393
+ "predict_rouge1_for_task879_schema_guided_dstc8_classification": 100.0,
394
+ "predict_rouge1_for_task880_schema_guided_dstc8_classification": 60.0,
395
+ "predict_rouge1_for_task890_gcwd_classification": 40.0,
396
+ "predict_rouge1_for_task891_gap_coreference_resolution": 50.0,
397
+ "predict_rouge1_for_task892_gap_reverse_coreference_resolution": 40.0,
398
+ "predict_rouge1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
399
+ "predict_rouge1_for_task935_defeasible_nli_atomic_classification": 60.0,
400
+ "predict_rouge1_for_task936_defeasible_nli_snli_classification": 40.0,
401
+ "predict_rouge1_for_task937_defeasible_nli_social_classification": 20.0,
402
+ "predict_rouge1_for_task957_e2e_nlg_text_generation_generate": 52.5426,
403
+ "predict_rouge1_for_task970_sherliic_causal_relationship": 100.0,
404
+ "predict_rouge1_for_textual_entailment": 47.7778,
405
+ "predict_rouge1_for_title_generation": 30.5218,
406
+ "predict_rouge1_for_word_analogy": 47.5,
407
+ "predict_rougeL": 50.4639,
408
+ "predict_rougeL_for_answerability_classification": 69.7436,
409
+ "predict_rougeL_for_cause_effect_classification": 67.4521,
410
+ "predict_rougeL_for_coreference_resolution": 56.9388,
411
+ "predict_rougeL_for_data_to_text": 34.4403,
412
+ "predict_rougeL_for_dialogue_act_recognition": 61.9048,
413
+ "predict_rougeL_for_grammar_error_correction": 73.7928,
414
+ "predict_rougeL_for_keyword_tagging": 64.1333,
415
+ "predict_rougeL_for_overlap_extraction": 22.624,
416
+ "predict_rougeL_for_question_rewriting": 54.491,
417
+ "predict_rougeL_for_task020_mctaco_span_based_question": 40.0,
418
+ "predict_rougeL_for_task033_winogrande_answer_generation": 66.6667,
419
+ "predict_rougeL_for_task034_winogrande_question_modification_object": 61.7974,
420
+ "predict_rougeL_for_task035_winogrande_question_modification_person": 58.9922,
421
+ "predict_rougeL_for_task036_qasc_topic_word_to_generate_related_fact": 66.0,
422
+ "predict_rougeL_for_task039_qasc_find_overlapping_words": 20.0,
423
+ "predict_rougeL_for_task050_multirc_answerability": 100.0,
424
+ "predict_rougeL_for_task102_commongen_sentence_generation": 47.6154,
425
+ "predict_rougeL_for_task1152_bard_analogical_reasoning_causation": 20.0,
426
+ "predict_rougeL_for_task1153_bard_analogical_reasoning_affordance": 20.0,
427
+ "predict_rougeL_for_task1154_bard_analogical_reasoning_travel": 40.0,
428
+ "predict_rougeL_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
429
+ "predict_rougeL_for_task1156_bard_analogical_reasoning_tools": 40.0,
430
+ "predict_rougeL_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
431
+ "predict_rougeL_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
432
+ "predict_rougeL_for_task1159_bard_analogical_reasoning_containers": 60.0,
433
+ "predict_rougeL_for_task1161_coda19_title_generation": 22.8951,
434
+ "predict_rougeL_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873,
435
+ "predict_rougeL_for_task121_zest_text_modification": 41.2253,
436
+ "predict_rougeL_for_task133_winowhy_reason_plausibility_detection": 20.0,
437
+ "predict_rougeL_for_task1342_amazon_us_reviews_title": 6.303,
438
+ "predict_rougeL_for_task1344_glue_entailment_classification": 40.0,
439
+ "predict_rougeL_for_task1345_glue_qqp_question_paraprashing": 21.3333,
440
+ "predict_rougeL_for_task1356_xlsum_title_generation": 16.0,
441
+ "predict_rougeL_for_task1358_xlsum_title_generation": 24.2101,
442
+ "predict_rougeL_for_task1385_anli_r1_entailment": 20.0,
443
+ "predict_rougeL_for_task1386_anli_r2_entailment": 40.0,
444
+ "predict_rougeL_for_task1387_anli_r3_entailment": 20.0,
445
+ "predict_rougeL_for_task1388_cb_entailment": 40.0,
446
+ "predict_rougeL_for_task1390_wscfixed_coreference": 60.0,
447
+ "predict_rougeL_for_task1391_winogrande_easy_answer_generation": 100.0,
448
+ "predict_rougeL_for_task1393_superglue_copa_text_completion": 100.0,
449
+ "predict_rougeL_for_task1394_meta_woz_task_classification": 93.3333,
450
+ "predict_rougeL_for_task1407_dart_question_generation": 20.8043,
451
+ "predict_rougeL_for_task1409_dart_text_generation": 36.5007,
452
+ "predict_rougeL_for_task1439_doqa_cooking_isanswerable": 40.0,
453
+ "predict_rougeL_for_task1442_doqa_movies_isanswerable": 80.0,
454
+ "predict_rougeL_for_task1516_imppres_naturallanguageinference": 20.0,
455
+ "predict_rougeL_for_task1529_scitail1.1_classification": 40.0,
456
+ "predict_rougeL_for_task1531_daily_dialog_type_classification": 40.0,
457
+ "predict_rougeL_for_task1533_daily_dialog_formal_classification": 60.0,
458
+ "predict_rougeL_for_task1534_daily_dialog_question_classification": 40.0,
459
+ "predict_rougeL_for_task1540_parsed_pdfs_summarization": 28.0519,
460
+ "predict_rougeL_for_task1554_scitail_classification": 40.0,
461
+ "predict_rougeL_for_task1557_jfleg_answer_generation": 73.7928,
462
+ "predict_rougeL_for_task1562_zest_text_modification": 38.5095,
463
+ "predict_rougeL_for_task1586_scifact_title_generation": 17.3747,
464
+ "predict_rougeL_for_task1598_nyc_long_text_generation": 25.8889,
465
+ "predict_rougeL_for_task1612_sick_label_classification": 40.0,
466
+ "predict_rougeL_for_task1615_sick_tclassify_b_relation_a": 86.6667,
467
+ "predict_rougeL_for_task1622_disfl_qa_text_modication": 57.2711,
468
+ "predict_rougeL_for_task1624_disfl_qa_question_yesno_classification": 60.0,
469
+ "predict_rougeL_for_task1631_openpi_answer_generation": 74.5305,
470
+ "predict_rougeL_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
471
+ "predict_rougeL_for_task1659_title_generation": 22.3799,
472
+ "predict_rougeL_for_task1664_winobias_text_generation": 80.0,
473
+ "predict_rougeL_for_task1728_web_nlg_data_to_text": 32.2577,
474
+ "predict_rougeL_for_task190_snli_classification": 40.0,
475
+ "predict_rougeL_for_task199_mnli_classification": 100.0,
476
+ "predict_rougeL_for_task200_mnli_entailment_classification": 80.0,
477
+ "predict_rougeL_for_task201_mnli_neutral_classification": 0.0,
478
+ "predict_rougeL_for_task202_mnli_contradiction_classification": 80.0,
479
+ "predict_rougeL_for_task219_rocstories_title_answer_generation": 20.7143,
480
+ "predict_rougeL_for_task220_rocstories_title_classification": 100.0,
481
+ "predict_rougeL_for_task226_english_language_answer_relevance_classification": 40.0,
482
+ "predict_rougeL_for_task232_iirc_link_number_classification": 60.0,
483
+ "predict_rougeL_for_task233_iirc_link_exists_classification": 40.0,
484
+ "predict_rougeL_for_task242_tweetqa_classification": 80.0,
485
+ "predict_rougeL_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333,
486
+ "predict_rougeL_for_task281_points_of_correspondence": 25.248,
487
+ "predict_rougeL_for_task288_gigaword_summarization": 23.8571,
488
+ "predict_rougeL_for_task290_tellmewhy_question_answerability": 86.6667,
489
+ "predict_rougeL_for_task304_numeric_fused_head_resolution": 13.3333,
490
+ "predict_rougeL_for_task329_gap_classification": 60.0,
491
+ "predict_rougeL_for_task330_gap_answer_generation": 97.1429,
492
+ "predict_rougeL_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
493
+ "predict_rougeL_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0,
494
+ "predict_rougeL_for_task391_causal_relationship": 80.0,
495
+ "predict_rougeL_for_task392_inverse_causal_relationship": 93.3333,
496
+ "predict_rougeL_for_task393_plausible_result_generation": 38.1587,
497
+ "predict_rougeL_for_task401_numeric_fused_head_reference": 43.3333,
498
+ "predict_rougeL_for_task402_grailqa_paraphrase_generation": 55.8974,
499
+ "predict_rougeL_for_task418_persent_title_generation": 10.6399,
500
+ "predict_rougeL_for_task442_com_qa_paraphrase_question_generation": 64.3333,
501
+ "predict_rougeL_for_task500_scruples_anecdotes_title_generation": 2.8571,
502
+ "predict_rougeL_for_task510_reddit_tifu_title_summarization": 39.811,
503
+ "predict_rougeL_for_task520_aquamuse_answer_given_in_passage": 100.0,
504
+ "predict_rougeL_for_task569_recipe_nlg_text_generation": 20.6061,
505
+ "predict_rougeL_for_task602_wikitext-103_answer_generation": 0.0,
506
+ "predict_rougeL_for_task613_politifact_text_generation": 33.3333,
507
+ "predict_rougeL_for_task614_glucose_cause_event_detection": 20.673,
508
+ "predict_rougeL_for_task619_ohsumed_abstract_title_generation": 38.4305,
509
+ "predict_rougeL_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333,
510
+ "predict_rougeL_for_task623_ohsumed_yes_no_answer_generation": 80.0,
511
+ "predict_rougeL_for_task640_esnli_classification": 40.0,
512
+ "predict_rougeL_for_task641_esnli_classification": 60.0,
513
+ "predict_rougeL_for_task642_esnli_classification": 40.0,
514
+ "predict_rougeL_for_task645_summarization": 100.0,
515
+ "predict_rougeL_for_task648_answer_generation": 13.3333,
516
+ "predict_rougeL_for_task670_ambigqa_question_generation": 75.6778,
517
+ "predict_rougeL_for_task671_ambigqa_text_generation": 59.3766,
518
+ "predict_rougeL_for_task677_ollie_sentence_answer_generation": 28.5268,
519
+ "predict_rougeL_for_task738_perspectrum_classification": 60.0,
520
+ "predict_rougeL_for_task743_eurlex_summarization": 26.0256,
521
+ "predict_rougeL_for_task760_msr_sqa_long_text_generation": 0.8103,
522
+ "predict_rougeL_for_task769_qed_summarization": 100.0,
523
+ "predict_rougeL_for_task827_copa_commonsense_reasoning": 80.0,
524
+ "predict_rougeL_for_task828_copa_commonsense_cause_effect": 60.0,
525
+ "predict_rougeL_for_task879_schema_guided_dstc8_classification": 100.0,
526
+ "predict_rougeL_for_task880_schema_guided_dstc8_classification": 60.0,
527
+ "predict_rougeL_for_task890_gcwd_classification": 40.0,
528
+ "predict_rougeL_for_task891_gap_coreference_resolution": 50.0,
529
+ "predict_rougeL_for_task892_gap_reverse_coreference_resolution": 40.0,
530
+ "predict_rougeL_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
531
+ "predict_rougeL_for_task935_defeasible_nli_atomic_classification": 60.0,
532
+ "predict_rougeL_for_task936_defeasible_nli_snli_classification": 40.0,
533
+ "predict_rougeL_for_task937_defeasible_nli_social_classification": 20.0,
534
+ "predict_rougeL_for_task957_e2e_nlg_text_generation_generate": 43.0283,
535
+ "predict_rougeL_for_task970_sherliic_causal_relationship": 100.0,
536
+ "predict_rougeL_for_textual_entailment": 47.7778,
537
+ "predict_rougeL_for_title_generation": 28.8976,
538
+ "predict_rougeL_for_word_analogy": 47.5,
539
+ "predict_runtime": 99.8176,
540
+ "predict_samples": 595,
541
+ "predict_samples_per_second": 5.961,
542
+ "predict_steps_per_second": 0.1,
543
+ "train_loss": 0.7423103578629032,
544
+ "train_runtime": 191097.123,
545
+ "train_samples": 71917,
546
+ "train_samples_per_second": 0.753,
547
+ "train_steps_per_second": 0.094
548
+ }
config.json ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "google/t5-xxl-lm-adapt",
3
+ "architectures": [
4
+ "T5ForConditionalGeneration"
5
+ ],
6
+ "d_ff": 10240,
7
+ "d_kv": 64,
8
+ "d_model": 4096,
9
+ "decoder_start_token_id": 0,
10
+ "dropout_rate": 0.1,
11
+ "eos_token_id": 1,
12
+ "feed_forward_proj": "gated-gelu",
13
+ "initializer_factor": 1.0,
14
+ "is_encoder_decoder": true,
15
+ "layer_norm_epsilon": 1e-06,
16
+ "model_type": "t5",
17
+ "num_decoder_layers": 24,
18
+ "num_heads": 64,
19
+ "num_layers": 24,
20
+ "output_past": true,
21
+ "pad_token_id": 0,
22
+ "relative_attention_max_distance": 128,
23
+ "relative_attention_num_buckets": 32,
24
+ "tie_word_embeddings": false,
25
+ "torch_dtype": "float32",
26
+ "transformers_version": "4.18.0",
27
+ "use_cache": true,
28
+ "vocab_size": 32100
29
+ }
eval_results.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 2.0,
3
+ "eval_global_step": 17980,
4
+ "eval_runtime": 0.0055,
5
+ "eval_samples": 0,
6
+ "eval_samples_per_second": 0.0,
7
+ "eval_steps_per_second": 0.0
8
+ }
metrics.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"run_name": "retrain-tk-instruct-minus-a-lot-of-data_t5-xxl-lm-adapt", "train_metrics": [{"train_runtime": 191097.123, "train_samples_per_second": 0.753, "train_steps_per_second": 0.094, "train_loss": 0.7423103578629032, "epoch": 2.0, "train_samples": 71917}], "eval_metrics": [{"eval_global_step": 17980, "eval_runtime": 0.0055, "eval_samples_per_second": 0.0, "eval_steps_per_second": 0.0, "epoch": 2.0, "eval_samples": 0}], "test_metrics": [{"predict_loss": 1.446231484413147, "predict_exact_match": 34.1176, "predict_rouge1": 51.3811, "predict_rougeL": 50.4639, "predict_f1": 48.7192, "predict_exact_match_for_task1356_xlsum_title_generation": 0.0, "predict_rouge1_for_task1356_xlsum_title_generation": 18.6667, "predict_rougeL_for_task1356_xlsum_title_generation": 16.0, "predict_f1_for_task1356_xlsum_title_generation": 16.0, "predict_exact_match_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_rouge1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_rougeL_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_f1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_exact_match_for_task641_esnli_classification": 60.0, "predict_rouge1_for_task641_esnli_classification": 60.0, "predict_rougeL_for_task641_esnli_classification": 60.0, "predict_f1_for_task641_esnli_classification": 60.0, "predict_exact_match_for_task1529_scitail1.1_classification": 40.0, "predict_rouge1_for_task1529_scitail1.1_classification": 40.0, "predict_rougeL_for_task1529_scitail1.1_classification": 40.0, "predict_f1_for_task1529_scitail1.1_classification": 40.0, "predict_exact_match_for_task202_mnli_contradiction_classification": 80.0, "predict_rouge1_for_task202_mnli_contradiction_classification": 80.0, "predict_rougeL_for_task202_mnli_contradiction_classification": 80.0, "predict_f1_for_task202_mnli_contradiction_classification": 80.0, "predict_exact_match_for_task670_ambigqa_question_generation": 0.0, "predict_rouge1_for_task670_ambigqa_question_generation": 75.6778, "predict_rougeL_for_task670_ambigqa_question_generation": 75.6778, "predict_f1_for_task670_ambigqa_question_generation": 75.6778, "predict_exact_match_for_task1393_superglue_copa_text_completion": 100.0, "predict_rouge1_for_task1393_superglue_copa_text_completion": 100.0, "predict_rougeL_for_task1393_superglue_copa_text_completion": 100.0, "predict_f1_for_task1393_superglue_copa_text_completion": 100.0, "predict_exact_match_for_task1344_glue_entailment_classification": 40.0, "predict_rouge1_for_task1344_glue_entailment_classification": 40.0, "predict_rougeL_for_task1344_glue_entailment_classification": 40.0, "predict_f1_for_task1344_glue_entailment_classification": 40.0, "predict_exact_match_for_task288_gigaword_summarization": 0.0, "predict_rouge1_for_task288_gigaword_summarization": 23.8571, "predict_rougeL_for_task288_gigaword_summarization": 23.8571, "predict_f1_for_task288_gigaword_summarization": 10.3571, "predict_exact_match_for_task1387_anli_r3_entailment": 0.0, "predict_rouge1_for_task1387_anli_r3_entailment": 20.0, "predict_rougeL_for_task1387_anli_r3_entailment": 20.0, "predict_f1_for_task1387_anli_r3_entailment": 0.0, "predict_exact_match_for_task1664_winobias_text_generation": 60.0, "predict_rouge1_for_task1664_winobias_text_generation": 80.0, "predict_rougeL_for_task1664_winobias_text_generation": 80.0, "predict_f1_for_task1664_winobias_text_generation": 80.0, "predict_exact_match_for_task1161_coda19_title_generation": 0.0, 
"predict_rouge1_for_task1161_coda19_title_generation": 25.5618, "predict_rougeL_for_task1161_coda19_title_generation": 22.8951, "predict_f1_for_task1161_coda19_title_generation": 19.4872, "predict_exact_match_for_task880_schema_guided_dstc8_classification": 20.0, "predict_rouge1_for_task880_schema_guided_dstc8_classification": 60.0, "predict_rougeL_for_task880_schema_guided_dstc8_classification": 60.0, "predict_f1_for_task880_schema_guided_dstc8_classification": 20.0, "predict_exact_match_for_task738_perspectrum_classification": 60.0, "predict_rouge1_for_task738_perspectrum_classification": 60.0, "predict_rougeL_for_task738_perspectrum_classification": 60.0, "predict_f1_for_task738_perspectrum_classification": 60.0, "predict_exact_match_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_rouge1_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_rougeL_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_f1_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_exact_match_for_task645_summarization": 100.0, "predict_rouge1_for_task645_summarization": 100.0, "predict_rougeL_for_task645_summarization": 100.0, "predict_f1_for_task645_summarization": 100.0, "predict_exact_match_for_task619_ohsumed_abstract_title_generation": 0.0, "predict_rouge1_for_task619_ohsumed_abstract_title_generation": 38.4305, "predict_rougeL_for_task619_ohsumed_abstract_title_generation": 38.4305, "predict_f1_for_task619_ohsumed_abstract_title_generation": 34.2082, "predict_exact_match_for_task1728_web_nlg_data_to_text": 0.0, "predict_rouge1_for_task1728_web_nlg_data_to_text": 41.1615, "predict_rougeL_for_task1728_web_nlg_data_to_text": 32.2577, "predict_f1_for_task1728_web_nlg_data_to_text": 42.0333, "predict_exact_match_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_rouge1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_rougeL_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_f1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_exact_match_for_task648_answer_generation": 0.0, "predict_rouge1_for_task648_answer_generation": 13.3333, "predict_rougeL_for_task648_answer_generation": 13.3333, "predict_f1_for_task648_answer_generation": 13.3333, "predict_exact_match_for_task242_tweetqa_classification": 80.0, "predict_rouge1_for_task242_tweetqa_classification": 80.0, "predict_rougeL_for_task242_tweetqa_classification": 80.0, "predict_f1_for_task242_tweetqa_classification": 80.0, "predict_exact_match_for_task620_ohsumed_medical_subject_headings_answer_generation": 0.0, "predict_rouge1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333, "predict_rougeL_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333, "predict_f1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333, "predict_exact_match_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_rouge1_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_rougeL_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_f1_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_exact_match_for_task500_scruples_anecdotes_title_generation": 0.0, "predict_rouge1_for_task500_scruples_anecdotes_title_generation": 2.8571, "predict_rougeL_for_task500_scruples_anecdotes_title_generation": 2.8571, "predict_f1_for_task500_scruples_anecdotes_title_generation": 3.0769, "predict_exact_match_for_task890_gcwd_classification": 
40.0, "predict_rouge1_for_task890_gcwd_classification": 40.0, "predict_rougeL_for_task890_gcwd_classification": 40.0, "predict_f1_for_task890_gcwd_classification": 40.0, "predict_exact_match_for_task039_qasc_find_overlapping_words": 20.0, "predict_rouge1_for_task039_qasc_find_overlapping_words": 20.0, "predict_rougeL_for_task039_qasc_find_overlapping_words": 20.0, "predict_f1_for_task039_qasc_find_overlapping_words": 20.0, "predict_exact_match_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_rouge1_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_rougeL_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_f1_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_exact_match_for_task1612_sick_label_classification": 40.0, "predict_rouge1_for_task1612_sick_label_classification": 40.0, "predict_rougeL_for_task1612_sick_label_classification": 40.0, "predict_f1_for_task1612_sick_label_classification": 40.0, "predict_exact_match_for_task1442_doqa_movies_isanswerable": 80.0, "predict_rouge1_for_task1442_doqa_movies_isanswerable": 80.0, "predict_rougeL_for_task1442_doqa_movies_isanswerable": 80.0, "predict_f1_for_task1442_doqa_movies_isanswerable": 80.0, "predict_exact_match_for_task233_iirc_link_exists_classification": 40.0, "predict_rouge1_for_task233_iirc_link_exists_classification": 40.0, "predict_rougeL_for_task233_iirc_link_exists_classification": 40.0, "predict_f1_for_task233_iirc_link_exists_classification": 40.0, "predict_exact_match_for_task936_defeasible_nli_snli_classification": 40.0, "predict_rouge1_for_task936_defeasible_nli_snli_classification": 40.0, "predict_rougeL_for_task936_defeasible_nli_snli_classification": 40.0, "predict_f1_for_task936_defeasible_nli_snli_classification": 40.0, "predict_exact_match_for_task1386_anli_r2_entailment": 0.0, "predict_rouge1_for_task1386_anli_r2_entailment": 40.0, "predict_rougeL_for_task1386_anli_r2_entailment": 40.0, "predict_f1_for_task1386_anli_r2_entailment": 0.0, "predict_exact_match_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_rouge1_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_rougeL_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_f1_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_exact_match_for_task290_tellmewhy_question_answerability": 60.0, "predict_rouge1_for_task290_tellmewhy_question_answerability": 86.6667, "predict_rougeL_for_task290_tellmewhy_question_answerability": 86.6667, "predict_f1_for_task290_tellmewhy_question_answerability": 86.6667, "predict_exact_match_for_task304_numeric_fused_head_resolution": 0.0, "predict_rouge1_for_task304_numeric_fused_head_resolution": 13.3333, "predict_rougeL_for_task304_numeric_fused_head_resolution": 13.3333, "predict_f1_for_task304_numeric_fused_head_resolution": 13.3333, "predict_exact_match_for_task760_msr_sqa_long_text_generation": 0.0, "predict_rouge1_for_task760_msr_sqa_long_text_generation": 0.8103, "predict_rougeL_for_task760_msr_sqa_long_text_generation": 0.8103, "predict_f1_for_task760_msr_sqa_long_text_generation": 0.7946, "predict_exact_match_for_task035_winogrande_question_modification_person": 0.0, "predict_rouge1_for_task035_winogrande_question_modification_person": 58.9922, "predict_rougeL_for_task035_winogrande_question_modification_person": 58.9922, "predict_f1_for_task035_winogrande_question_modification_person": 58.9922, "predict_exact_match_for_task569_recipe_nlg_text_generation": 0.0, "predict_rouge1_for_task569_recipe_nlg_text_generation": 24.2424, 
"predict_rougeL_for_task569_recipe_nlg_text_generation": 20.6061, "predict_f1_for_task569_recipe_nlg_text_generation": 20.6061, "predict_exact_match_for_task391_causal_relationship": 40.0, "predict_rouge1_for_task391_causal_relationship": 80.0, "predict_rougeL_for_task391_causal_relationship": 80.0, "predict_f1_for_task391_causal_relationship": 80.0, "predict_exact_match_for_task891_gap_coreference_resolution": 40.0, "predict_rouge1_for_task891_gap_coreference_resolution": 50.0, "predict_rougeL_for_task891_gap_coreference_resolution": 50.0, "predict_f1_for_task891_gap_coreference_resolution": 50.0, "predict_exact_match_for_task1586_scifact_title_generation": 0.0, "predict_rouge1_for_task1586_scifact_title_generation": 17.3747, "predict_rougeL_for_task1586_scifact_title_generation": 17.3747, "predict_f1_for_task1586_scifact_title_generation": 16.1509, "predict_exact_match_for_task602_wikitext-103_answer_generation": 0.0, "predict_rouge1_for_task602_wikitext-103_answer_generation": 0.0, "predict_rougeL_for_task602_wikitext-103_answer_generation": 0.0, "predict_f1_for_task602_wikitext-103_answer_generation": 0.0, "predict_exact_match_for_task1195_disflqa_disfluent_to_fluent_conversion": 0.0, "predict_rouge1_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873, "predict_rougeL_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873, "predict_f1_for_task1195_disflqa_disfluent_to_fluent_conversion": 62.1678, "predict_exact_match_for_task1409_dart_text_generation": 0.0, "predict_rouge1_for_task1409_dart_text_generation": 41.9306, "predict_rougeL_for_task1409_dart_text_generation": 36.5007, "predict_f1_for_task1409_dart_text_generation": 39.9284, "predict_exact_match_for_task033_winogrande_answer_generation": 40.0, "predict_rouge1_for_task033_winogrande_answer_generation": 66.6667, "predict_rougeL_for_task033_winogrande_answer_generation": 66.6667, "predict_f1_for_task033_winogrande_answer_generation": 66.6667, "predict_exact_match_for_task1407_dart_question_generation": 0.0, "predict_rouge1_for_task1407_dart_question_generation": 24.086, "predict_rougeL_for_task1407_dart_question_generation": 20.8043, "predict_f1_for_task1407_dart_question_generation": 23.7903, "predict_exact_match_for_task402_grailqa_paraphrase_generation": 0.0, "predict_rouge1_for_task402_grailqa_paraphrase_generation": 60.8498, "predict_rougeL_for_task402_grailqa_paraphrase_generation": 55.8974, "predict_f1_for_task402_grailqa_paraphrase_generation": 58.1832, "predict_exact_match_for_task201_mnli_neutral_classification": 0.0, "predict_rouge1_for_task201_mnli_neutral_classification": 0.0, "predict_rougeL_for_task201_mnli_neutral_classification": 0.0, "predict_f1_for_task201_mnli_neutral_classification": 0.0, "predict_exact_match_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_rouge1_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_rougeL_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_f1_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_exact_match_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_rouge1_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_rougeL_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_f1_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_exact_match_for_task828_copa_commonsense_cause_effect": 60.0, "predict_rouge1_for_task828_copa_commonsense_cause_effect": 60.0, "predict_rougeL_for_task828_copa_commonsense_cause_effect": 60.0, 
"predict_f1_for_task828_copa_commonsense_cause_effect": 60.0, "predict_exact_match_for_task769_qed_summarization": 100.0, "predict_rouge1_for_task769_qed_summarization": 100.0, "predict_rougeL_for_task769_qed_summarization": 100.0, "predict_f1_for_task769_qed_summarization": 100.0, "predict_exact_match_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_rouge1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_rougeL_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_f1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_exact_match_for_task1385_anli_r1_entailment": 0.0, "predict_rouge1_for_task1385_anli_r1_entailment": 20.0, "predict_rougeL_for_task1385_anli_r1_entailment": 20.0, "predict_f1_for_task1385_anli_r1_entailment": 0.0, "predict_exact_match_for_task1531_daily_dialog_type_classification": 40.0, "predict_rouge1_for_task1531_daily_dialog_type_classification": 40.0, "predict_rougeL_for_task1531_daily_dialog_type_classification": 40.0, "predict_f1_for_task1531_daily_dialog_type_classification": 40.0, "predict_exact_match_for_task1516_imppres_naturallanguageinference": 20.0, "predict_rouge1_for_task1516_imppres_naturallanguageinference": 20.0, "predict_rougeL_for_task1516_imppres_naturallanguageinference": 20.0, "predict_f1_for_task1516_imppres_naturallanguageinference": 20.0, "predict_exact_match_for_task1394_meta_woz_task_classification": 80.0, "predict_rouge1_for_task1394_meta_woz_task_classification": 93.3333, "predict_rougeL_for_task1394_meta_woz_task_classification": 93.3333, "predict_f1_for_task1394_meta_woz_task_classification": 80.0, "predict_exact_match_for_task401_numeric_fused_head_reference": 20.0, "predict_rouge1_for_task401_numeric_fused_head_reference": 43.3333, "predict_rougeL_for_task401_numeric_fused_head_reference": 43.3333, "predict_f1_for_task401_numeric_fused_head_reference": 43.3333, "predict_exact_match_for_task1598_nyc_long_text_generation": 0.0, "predict_rouge1_for_task1598_nyc_long_text_generation": 27.0, "predict_rougeL_for_task1598_nyc_long_text_generation": 25.8889, "predict_f1_for_task1598_nyc_long_text_generation": 27.3109, "predict_exact_match_for_task1615_sick_tclassify_b_relation_a": 60.0, "predict_rouge1_for_task1615_sick_tclassify_b_relation_a": 86.6667, "predict_rougeL_for_task1615_sick_tclassify_b_relation_a": 86.6667, "predict_f1_for_task1615_sick_tclassify_b_relation_a": 60.0, "predict_exact_match_for_task970_sherliic_causal_relationship": 100.0, "predict_rouge1_for_task970_sherliic_causal_relationship": 100.0, "predict_rougeL_for_task970_sherliic_causal_relationship": 100.0, "predict_f1_for_task970_sherliic_causal_relationship": 100.0, "predict_exact_match_for_task1390_wscfixed_coreference": 60.0, "predict_rouge1_for_task1390_wscfixed_coreference": 60.0, "predict_rougeL_for_task1390_wscfixed_coreference": 60.0, "predict_f1_for_task1390_wscfixed_coreference": 60.0, "predict_exact_match_for_task199_mnli_classification": 100.0, "predict_rouge1_for_task199_mnli_classification": 100.0, "predict_rougeL_for_task199_mnli_classification": 100.0, "predict_f1_for_task199_mnli_classification": 100.0, "predict_exact_match_for_task034_winogrande_question_modification_object": 0.0, "predict_rouge1_for_task034_winogrande_question_modification_object": 61.7974, "predict_rougeL_for_task034_winogrande_question_modification_object": 61.7974, "predict_f1_for_task034_winogrande_question_modification_object": 61.7974, 
"predict_exact_match_for_task133_winowhy_reason_plausibility_detection": 20.0, "predict_rouge1_for_task133_winowhy_reason_plausibility_detection": 20.0, "predict_rougeL_for_task133_winowhy_reason_plausibility_detection": 20.0, "predict_f1_for_task133_winowhy_reason_plausibility_detection": 20.0, "predict_exact_match_for_task226_english_language_answer_relevance_classification": 40.0, "predict_rouge1_for_task226_english_language_answer_relevance_classification": 40.0, "predict_rougeL_for_task226_english_language_answer_relevance_classification": 40.0, "predict_f1_for_task226_english_language_answer_relevance_classification": 40.0, "predict_exact_match_for_task510_reddit_tifu_title_summarization": 0.0, "predict_rouge1_for_task510_reddit_tifu_title_summarization": 39.811, "predict_rougeL_for_task510_reddit_tifu_title_summarization": 39.811, "predict_f1_for_task510_reddit_tifu_title_summarization": 38.265, "predict_exact_match_for_task935_defeasible_nli_atomic_classification": 40.0, "predict_rouge1_for_task935_defeasible_nli_atomic_classification": 60.0, "predict_rougeL_for_task935_defeasible_nli_atomic_classification": 60.0, "predict_f1_for_task935_defeasible_nli_atomic_classification": 40.0, "predict_exact_match_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_rouge1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_rougeL_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_f1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_exact_match_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_rouge1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_rougeL_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_f1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_exact_match_for_task937_defeasible_nli_social_classification": 20.0, "predict_rouge1_for_task937_defeasible_nli_social_classification": 20.0, "predict_rougeL_for_task937_defeasible_nli_social_classification": 20.0, "predict_f1_for_task937_defeasible_nli_social_classification": 20.0, "predict_exact_match_for_task743_eurlex_summarization": 0.0, "predict_rouge1_for_task743_eurlex_summarization": 31.4423, "predict_rougeL_for_task743_eurlex_summarization": 26.0256, "predict_f1_for_task743_eurlex_summarization": 27.1719, "predict_exact_match_for_task1388_cb_entailment": 0.0, "predict_rouge1_for_task1388_cb_entailment": 40.0, "predict_rougeL_for_task1388_cb_entailment": 40.0, "predict_f1_for_task1388_cb_entailment": 0.0, "predict_exact_match_for_task671_ambigqa_text_generation": 0.0, "predict_rouge1_for_task671_ambigqa_text_generation": 62.4536, "predict_rougeL_for_task671_ambigqa_text_generation": 59.3766, "predict_f1_for_task671_ambigqa_text_generation": 62.4536, "predict_exact_match_for_task121_zest_text_modification": 0.0, "predict_rouge1_for_task121_zest_text_modification": 47.5745, "predict_rougeL_for_task121_zest_text_modification": 41.2253, "predict_f1_for_task121_zest_text_modification": 42.083, "predict_exact_match_for_task1345_glue_qqp_question_paraprashing": 0.0, "predict_rouge1_for_task1345_glue_qqp_question_paraprashing": 21.3333, "predict_rougeL_for_task1345_glue_qqp_question_paraprashing": 21.3333, "predict_f1_for_task1345_glue_qqp_question_paraprashing": 21.6667, "predict_exact_match_for_task330_gap_answer_generation": 80.0, "predict_rouge1_for_task330_gap_answer_generation": 97.1429, 
"predict_rougeL_for_task330_gap_answer_generation": 97.1429, "predict_f1_for_task330_gap_answer_generation": 96.0, "predict_exact_match_for_task1342_amazon_us_reviews_title": 0.0, "predict_rouge1_for_task1342_amazon_us_reviews_title": 8.9697, "predict_rougeL_for_task1342_amazon_us_reviews_title": 6.303, "predict_f1_for_task1342_amazon_us_reviews_title": 8.9697, "predict_exact_match_for_task329_gap_classification": 60.0, "predict_rouge1_for_task329_gap_classification": 60.0, "predict_rougeL_for_task329_gap_classification": 60.0, "predict_f1_for_task329_gap_classification": 60.0, "predict_exact_match_for_task281_points_of_correspondence": 0.0, "predict_rouge1_for_task281_points_of_correspondence": 26.0813, "predict_rougeL_for_task281_points_of_correspondence": 25.248, "predict_f1_for_task281_points_of_correspondence": 27.6283, "predict_exact_match_for_task036_qasc_topic_word_to_generate_related_fact": 40.0, "predict_rouge1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0, "predict_rougeL_for_task036_qasc_topic_word_to_generate_related_fact": 66.0, "predict_f1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0, "predict_exact_match_for_task1554_scitail_classification": 40.0, "predict_rouge1_for_task1554_scitail_classification": 40.0, "predict_rougeL_for_task1554_scitail_classification": 40.0, "predict_f1_for_task1554_scitail_classification": 40.0, "predict_exact_match_for_task050_multirc_answerability": 100.0, "predict_rouge1_for_task050_multirc_answerability": 100.0, "predict_rougeL_for_task050_multirc_answerability": 100.0, "predict_f1_for_task050_multirc_answerability": 100.0, "predict_exact_match_for_task362_spolin_yesand_prompt_response_sub_classification": 20.0, "predict_rouge1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0, "predict_rougeL_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0, "predict_f1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0, "predict_exact_match_for_task1557_jfleg_answer_generation": 20.0, "predict_rouge1_for_task1557_jfleg_answer_generation": 73.7928, "predict_rougeL_for_task1557_jfleg_answer_generation": 73.7928, "predict_f1_for_task1557_jfleg_answer_generation": 71.7726, "predict_exact_match_for_task249_enhanced_wsc_pronoun_disambiguation": 20.0, "predict_rouge1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333, "predict_rougeL_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333, "predict_f1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333, "predict_exact_match_for_task957_e2e_nlg_text_generation_generate": 0.0, "predict_rouge1_for_task957_e2e_nlg_text_generation_generate": 52.5426, "predict_rougeL_for_task957_e2e_nlg_text_generation_generate": 43.0283, "predict_f1_for_task957_e2e_nlg_text_generation_generate": 52.8436, "predict_exact_match_for_task418_persent_title_generation": 0.0, "predict_rouge1_for_task418_persent_title_generation": 15.3458, "predict_rougeL_for_task418_persent_title_generation": 10.6399, "predict_f1_for_task418_persent_title_generation": 15.3458, "predict_exact_match_for_task614_glucose_cause_event_detection": 0.0, "predict_rouge1_for_task614_glucose_cause_event_detection": 20.673, "predict_rougeL_for_task614_glucose_cause_event_detection": 20.673, "predict_f1_for_task614_glucose_cause_event_detection": 22.7647, "predict_exact_match_for_task677_ollie_sentence_answer_generation": 0.0, "predict_rouge1_for_task677_ollie_sentence_answer_generation": 28.5268, "predict_rougeL_for_task677_ollie_sentence_answer_generation": 28.5268, 
"predict_f1_for_task677_ollie_sentence_answer_generation": 27.0794, "predict_exact_match_for_task220_rocstories_title_classification": 100.0, "predict_rouge1_for_task220_rocstories_title_classification": 100.0, "predict_rougeL_for_task220_rocstories_title_classification": 100.0, "predict_f1_for_task220_rocstories_title_classification": 100.0, "predict_exact_match_for_task1631_openpi_answer_generation": 0.0, "predict_rouge1_for_task1631_openpi_answer_generation": 77.0305, "predict_rougeL_for_task1631_openpi_answer_generation": 74.5305, "predict_f1_for_task1631_openpi_answer_generation": 77.0305, "predict_exact_match_for_task232_iirc_link_number_classification": 60.0, "predict_rouge1_for_task232_iirc_link_number_classification": 60.0, "predict_rougeL_for_task232_iirc_link_number_classification": 60.0, "predict_f1_for_task232_iirc_link_number_classification": 60.0, "predict_exact_match_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_rouge1_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_rougeL_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_f1_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_exact_match_for_task1358_xlsum_title_generation": 0.0, "predict_rouge1_for_task1358_xlsum_title_generation": 26.563, "predict_rougeL_for_task1358_xlsum_title_generation": 24.2101, "predict_f1_for_task1358_xlsum_title_generation": 27.1161, "predict_exact_match_for_task1533_daily_dialog_formal_classification": 60.0, "predict_rouge1_for_task1533_daily_dialog_formal_classification": 60.0, "predict_rougeL_for_task1533_daily_dialog_formal_classification": 60.0, "predict_f1_for_task1533_daily_dialog_formal_classification": 60.0, "predict_exact_match_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_rouge1_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_rougeL_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_f1_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_exact_match_for_task1659_title_generation": 0.0, "predict_rouge1_for_task1659_title_generation": 27.5046, "predict_rougeL_for_task1659_title_generation": 22.3799, "predict_f1_for_task1659_title_generation": 25.0379, "predict_exact_match_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_rouge1_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_rougeL_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_f1_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_exact_match_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_rouge1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_rougeL_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_f1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_exact_match_for_task827_copa_commonsense_reasoning": 80.0, "predict_rouge1_for_task827_copa_commonsense_reasoning": 80.0, "predict_rougeL_for_task827_copa_commonsense_reasoning": 80.0, "predict_f1_for_task827_copa_commonsense_reasoning": 80.0, "predict_exact_match_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_rouge1_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_rougeL_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_f1_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_exact_match_for_task393_plausible_result_generation": 0.0, "predict_rouge1_for_task393_plausible_result_generation": 38.1587, 
"predict_rougeL_for_task393_plausible_result_generation": 38.1587, "predict_f1_for_task393_plausible_result_generation": 38.1587, "predict_exact_match_for_task879_schema_guided_dstc8_classification": 100.0, "predict_rouge1_for_task879_schema_guided_dstc8_classification": 100.0, "predict_rougeL_for_task879_schema_guided_dstc8_classification": 100.0, "predict_f1_for_task879_schema_guided_dstc8_classification": 100.0, "predict_exact_match_for_task613_politifact_text_generation": 0.0, "predict_rouge1_for_task613_politifact_text_generation": 33.3333, "predict_rougeL_for_task613_politifact_text_generation": 33.3333, "predict_f1_for_task613_politifact_text_generation": 13.3333, "predict_exact_match_for_task219_rocstories_title_answer_generation": 0.0, "predict_rouge1_for_task219_rocstories_title_answer_generation": 20.7143, "predict_rougeL_for_task219_rocstories_title_answer_generation": 20.7143, "predict_f1_for_task219_rocstories_title_answer_generation": 15.7143, "predict_exact_match_for_task190_snli_classification": 40.0, "predict_rouge1_for_task190_snli_classification": 40.0, "predict_rougeL_for_task190_snli_classification": 40.0, "predict_f1_for_task190_snli_classification": 40.0, "predict_exact_match_for_task200_mnli_entailment_classification": 80.0, "predict_rouge1_for_task200_mnli_entailment_classification": 80.0, "predict_rougeL_for_task200_mnli_entailment_classification": 80.0, "predict_f1_for_task200_mnli_entailment_classification": 80.0, "predict_exact_match_for_task1534_daily_dialog_question_classification": 40.0, "predict_rouge1_for_task1534_daily_dialog_question_classification": 40.0, "predict_rougeL_for_task1534_daily_dialog_question_classification": 40.0, "predict_f1_for_task1534_daily_dialog_question_classification": 40.0, "predict_exact_match_for_task1540_parsed_pdfs_summarization": 0.0, "predict_rouge1_for_task1540_parsed_pdfs_summarization": 28.0519, "predict_rougeL_for_task1540_parsed_pdfs_summarization": 28.0519, "predict_f1_for_task1540_parsed_pdfs_summarization": 25.1948, "predict_exact_match_for_task442_com_qa_paraphrase_question_generation": 0.0, "predict_rouge1_for_task442_com_qa_paraphrase_question_generation": 66.8333, "predict_rougeL_for_task442_com_qa_paraphrase_question_generation": 64.3333, "predict_f1_for_task442_com_qa_paraphrase_question_generation": 64.9744, "predict_exact_match_for_task392_inverse_causal_relationship": 80.0, "predict_rouge1_for_task392_inverse_causal_relationship": 93.3333, "predict_rougeL_for_task392_inverse_causal_relationship": 93.3333, "predict_f1_for_task392_inverse_causal_relationship": 93.3333, "predict_exact_match_for_task1562_zest_text_modification": 0.0, "predict_rouge1_for_task1562_zest_text_modification": 47.5095, "predict_rougeL_for_task1562_zest_text_modification": 38.5095, "predict_f1_for_task1562_zest_text_modification": 45.6913, "predict_exact_match_for_task640_esnli_classification": 40.0, "predict_rouge1_for_task640_esnli_classification": 40.0, "predict_rougeL_for_task640_esnli_classification": 40.0, "predict_f1_for_task640_esnli_classification": 40.0, "predict_exact_match_for_task1622_disfl_qa_text_modication": 0.0, "predict_rouge1_for_task1622_disfl_qa_text_modication": 66.5018, "predict_rougeL_for_task1622_disfl_qa_text_modication": 57.2711, "predict_f1_for_task1622_disfl_qa_text_modication": 64.3711, "predict_exact_match_for_task623_ohsumed_yes_no_answer_generation": 80.0, "predict_rouge1_for_task623_ohsumed_yes_no_answer_generation": 80.0, "predict_rougeL_for_task623_ohsumed_yes_no_answer_generation": 80.0, 
"predict_f1_for_task623_ohsumed_yes_no_answer_generation": 80.0, "predict_exact_match_for_task020_mctaco_span_based_question": 40.0, "predict_rouge1_for_task020_mctaco_span_based_question": 40.0, "predict_rougeL_for_task020_mctaco_span_based_question": 40.0, "predict_f1_for_task020_mctaco_span_based_question": 40.0, "predict_exact_match_for_task642_esnli_classification": 40.0, "predict_rouge1_for_task642_esnli_classification": 40.0, "predict_rougeL_for_task642_esnli_classification": 40.0, "predict_f1_for_task642_esnli_classification": 40.0, "predict_exact_match_for_task102_commongen_sentence_generation": 0.0, "predict_rouge1_for_task102_commongen_sentence_generation": 60.8462, "predict_rougeL_for_task102_commongen_sentence_generation": 47.6154, "predict_f1_for_task102_commongen_sentence_generation": 49.0256, "predict_exact_match_for_title_generation": 11.1111, "predict_rouge1_for_title_generation": 30.5218, "predict_rougeL_for_title_generation": 28.8976, "predict_f1_for_title_generation": 27.9279, "predict_exact_match_for_coreference_resolution": 45.7143, "predict_rouge1_for_coreference_resolution": 56.9388, "predict_rougeL_for_coreference_resolution": 56.9388, "predict_f1_for_coreference_resolution": 56.8571, "predict_exact_match_for_textual_entailment": 40.8333, "predict_rouge1_for_textual_entailment": 47.7778, "predict_rougeL_for_textual_entailment": 47.7778, "predict_f1_for_textual_entailment": 40.8333, "predict_exact_match_for_question_rewriting": 0.0, "predict_rouge1_for_question_rewriting": 57.6828, "predict_rougeL_for_question_rewriting": 54.491, "predict_f1_for_question_rewriting": 56.1871, "predict_exact_match_for_cause_effect_classification": 51.4286, "predict_rouge1_for_cause_effect_classification": 67.4521, "predict_rougeL_for_cause_effect_classification": 67.4521, "predict_f1_for_cause_effect_classification": 67.751, "predict_exact_match_for_dialogue_act_recognition": 51.4286, "predict_rouge1_for_dialogue_act_recognition": 61.9048, "predict_rougeL_for_dialogue_act_recognition": 61.9048, "predict_f1_for_dialogue_act_recognition": 54.2857, "predict_exact_match_for_answerability_classification": 67.6923, "predict_rouge1_for_answerability_classification": 69.7436, "predict_rougeL_for_answerability_classification": 69.7436, "predict_f1_for_answerability_classification": 69.7436, "predict_exact_match_for_keyword_tagging": 44.0, "predict_rouge1_for_keyword_tagging": 64.1333, "predict_rougeL_for_keyword_tagging": 64.1333, "predict_f1_for_keyword_tagging": 60.1333, "predict_exact_match_for_data_to_text": 0.0, "predict_rouge1_for_data_to_text": 39.3261, "predict_rougeL_for_data_to_text": 34.4403, "predict_f1_for_data_to_text": 37.7596, "predict_exact_match_for_word_analogy": 47.5, "predict_rouge1_for_word_analogy": 47.5, "predict_rougeL_for_word_analogy": 47.5, "predict_f1_for_word_analogy": 47.5, "predict_exact_match_for_overlap_extraction": 10.0, "predict_rouge1_for_overlap_extraction": 23.0406, "predict_rougeL_for_overlap_extraction": 22.624, "predict_f1_for_overlap_extraction": 23.8141, "predict_exact_match_for_grammar_error_correction": 20.0, "predict_rouge1_for_grammar_error_correction": 73.7928, "predict_rougeL_for_grammar_error_correction": 73.7928, "predict_f1_for_grammar_error_correction": 71.7726, "predict_gen_len": 4.8739, "predict_global_step": 17980, "predict_runtime": 99.8176, "predict_samples_per_second": 5.961, "predict_steps_per_second": 0.1, "predict_samples": 595, "epoch": 2.0}], "train_runtime": 191097.123, "train_samples_per_second": 0.753, 
"train_steps_per_second": 0.094, "train_loss": 0.7423103578629032, "epoch": 2.0, "train_samples": 71917, "eval_global_step": 17980, "eval_runtime": 0.0055, "eval_samples_per_second": 0.0, "eval_steps_per_second": 0.0, "eval_samples": 0, "predict_loss": 1.446231484413147, "predict_exact_match": 34.1176, "predict_rouge1": 51.3811, "predict_rougeL": 50.4639, "predict_f1": 48.7192, "predict_exact_match_for_task1356_xlsum_title_generation": 0.0, "predict_rouge1_for_task1356_xlsum_title_generation": 18.6667, "predict_rougeL_for_task1356_xlsum_title_generation": 16.0, "predict_f1_for_task1356_xlsum_title_generation": 16.0, "predict_exact_match_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_rouge1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_rougeL_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_f1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0, "predict_exact_match_for_task641_esnli_classification": 60.0, "predict_rouge1_for_task641_esnli_classification": 60.0, "predict_rougeL_for_task641_esnli_classification": 60.0, "predict_f1_for_task641_esnli_classification": 60.0, "predict_exact_match_for_task1529_scitail1.1_classification": 40.0, "predict_rouge1_for_task1529_scitail1.1_classification": 40.0, "predict_rougeL_for_task1529_scitail1.1_classification": 40.0, "predict_f1_for_task1529_scitail1.1_classification": 40.0, "predict_exact_match_for_task202_mnli_contradiction_classification": 80.0, "predict_rouge1_for_task202_mnli_contradiction_classification": 80.0, "predict_rougeL_for_task202_mnli_contradiction_classification": 80.0, "predict_f1_for_task202_mnli_contradiction_classification": 80.0, "predict_exact_match_for_task670_ambigqa_question_generation": 0.0, "predict_rouge1_for_task670_ambigqa_question_generation": 75.6778, "predict_rougeL_for_task670_ambigqa_question_generation": 75.6778, "predict_f1_for_task670_ambigqa_question_generation": 75.6778, "predict_exact_match_for_task1393_superglue_copa_text_completion": 100.0, "predict_rouge1_for_task1393_superglue_copa_text_completion": 100.0, "predict_rougeL_for_task1393_superglue_copa_text_completion": 100.0, "predict_f1_for_task1393_superglue_copa_text_completion": 100.0, "predict_exact_match_for_task1344_glue_entailment_classification": 40.0, "predict_rouge1_for_task1344_glue_entailment_classification": 40.0, "predict_rougeL_for_task1344_glue_entailment_classification": 40.0, "predict_f1_for_task1344_glue_entailment_classification": 40.0, "predict_exact_match_for_task288_gigaword_summarization": 0.0, "predict_rouge1_for_task288_gigaword_summarization": 23.8571, "predict_rougeL_for_task288_gigaword_summarization": 23.8571, "predict_f1_for_task288_gigaword_summarization": 10.3571, "predict_exact_match_for_task1387_anli_r3_entailment": 0.0, "predict_rouge1_for_task1387_anli_r3_entailment": 20.0, "predict_rougeL_for_task1387_anli_r3_entailment": 20.0, "predict_f1_for_task1387_anli_r3_entailment": 0.0, "predict_exact_match_for_task1664_winobias_text_generation": 60.0, "predict_rouge1_for_task1664_winobias_text_generation": 80.0, "predict_rougeL_for_task1664_winobias_text_generation": 80.0, "predict_f1_for_task1664_winobias_text_generation": 80.0, "predict_exact_match_for_task1161_coda19_title_generation": 0.0, "predict_rouge1_for_task1161_coda19_title_generation": 25.5618, "predict_rougeL_for_task1161_coda19_title_generation": 22.8951, "predict_f1_for_task1161_coda19_title_generation": 19.4872, 
"predict_exact_match_for_task880_schema_guided_dstc8_classification": 20.0, "predict_rouge1_for_task880_schema_guided_dstc8_classification": 60.0, "predict_rougeL_for_task880_schema_guided_dstc8_classification": 60.0, "predict_f1_for_task880_schema_guided_dstc8_classification": 20.0, "predict_exact_match_for_task738_perspectrum_classification": 60.0, "predict_rouge1_for_task738_perspectrum_classification": 60.0, "predict_rougeL_for_task738_perspectrum_classification": 60.0, "predict_f1_for_task738_perspectrum_classification": 60.0, "predict_exact_match_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_rouge1_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_rougeL_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_f1_for_task1439_doqa_cooking_isanswerable": 40.0, "predict_exact_match_for_task645_summarization": 100.0, "predict_rouge1_for_task645_summarization": 100.0, "predict_rougeL_for_task645_summarization": 100.0, "predict_f1_for_task645_summarization": 100.0, "predict_exact_match_for_task619_ohsumed_abstract_title_generation": 0.0, "predict_rouge1_for_task619_ohsumed_abstract_title_generation": 38.4305, "predict_rougeL_for_task619_ohsumed_abstract_title_generation": 38.4305, "predict_f1_for_task619_ohsumed_abstract_title_generation": 34.2082, "predict_exact_match_for_task1728_web_nlg_data_to_text": 0.0, "predict_rouge1_for_task1728_web_nlg_data_to_text": 41.1615, "predict_rougeL_for_task1728_web_nlg_data_to_text": 32.2577, "predict_f1_for_task1728_web_nlg_data_to_text": 42.0333, "predict_exact_match_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_rouge1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_rougeL_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_f1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0, "predict_exact_match_for_task648_answer_generation": 0.0, "predict_rouge1_for_task648_answer_generation": 13.3333, "predict_rougeL_for_task648_answer_generation": 13.3333, "predict_f1_for_task648_answer_generation": 13.3333, "predict_exact_match_for_task242_tweetqa_classification": 80.0, "predict_rouge1_for_task242_tweetqa_classification": 80.0, "predict_rougeL_for_task242_tweetqa_classification": 80.0, "predict_f1_for_task242_tweetqa_classification": 80.0, "predict_exact_match_for_task620_ohsumed_medical_subject_headings_answer_generation": 0.0, "predict_rouge1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333, "predict_rougeL_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333, "predict_f1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333, "predict_exact_match_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_rouge1_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_rougeL_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_f1_for_task1159_bard_analogical_reasoning_containers": 60.0, "predict_exact_match_for_task500_scruples_anecdotes_title_generation": 0.0, "predict_rouge1_for_task500_scruples_anecdotes_title_generation": 2.8571, "predict_rougeL_for_task500_scruples_anecdotes_title_generation": 2.8571, "predict_f1_for_task500_scruples_anecdotes_title_generation": 3.0769, "predict_exact_match_for_task890_gcwd_classification": 40.0, "predict_rouge1_for_task890_gcwd_classification": 40.0, "predict_rougeL_for_task890_gcwd_classification": 40.0, "predict_f1_for_task890_gcwd_classification": 40.0, 
"predict_exact_match_for_task039_qasc_find_overlapping_words": 20.0, "predict_rouge1_for_task039_qasc_find_overlapping_words": 20.0, "predict_rougeL_for_task039_qasc_find_overlapping_words": 20.0, "predict_f1_for_task039_qasc_find_overlapping_words": 20.0, "predict_exact_match_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_rouge1_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_rougeL_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_f1_for_task1154_bard_analogical_reasoning_travel": 40.0, "predict_exact_match_for_task1612_sick_label_classification": 40.0, "predict_rouge1_for_task1612_sick_label_classification": 40.0, "predict_rougeL_for_task1612_sick_label_classification": 40.0, "predict_f1_for_task1612_sick_label_classification": 40.0, "predict_exact_match_for_task1442_doqa_movies_isanswerable": 80.0, "predict_rouge1_for_task1442_doqa_movies_isanswerable": 80.0, "predict_rougeL_for_task1442_doqa_movies_isanswerable": 80.0, "predict_f1_for_task1442_doqa_movies_isanswerable": 80.0, "predict_exact_match_for_task233_iirc_link_exists_classification": 40.0, "predict_rouge1_for_task233_iirc_link_exists_classification": 40.0, "predict_rougeL_for_task233_iirc_link_exists_classification": 40.0, "predict_f1_for_task233_iirc_link_exists_classification": 40.0, "predict_exact_match_for_task936_defeasible_nli_snli_classification": 40.0, "predict_rouge1_for_task936_defeasible_nli_snli_classification": 40.0, "predict_rougeL_for_task936_defeasible_nli_snli_classification": 40.0, "predict_f1_for_task936_defeasible_nli_snli_classification": 40.0, "predict_exact_match_for_task1386_anli_r2_entailment": 0.0, "predict_rouge1_for_task1386_anli_r2_entailment": 40.0, "predict_rougeL_for_task1386_anli_r2_entailment": 40.0, "predict_f1_for_task1386_anli_r2_entailment": 0.0, "predict_exact_match_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_rouge1_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_rougeL_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_f1_for_task1152_bard_analogical_reasoning_causation": 20.0, "predict_exact_match_for_task290_tellmewhy_question_answerability": 60.0, "predict_rouge1_for_task290_tellmewhy_question_answerability": 86.6667, "predict_rougeL_for_task290_tellmewhy_question_answerability": 86.6667, "predict_f1_for_task290_tellmewhy_question_answerability": 86.6667, "predict_exact_match_for_task304_numeric_fused_head_resolution": 0.0, "predict_rouge1_for_task304_numeric_fused_head_resolution": 13.3333, "predict_rougeL_for_task304_numeric_fused_head_resolution": 13.3333, "predict_f1_for_task304_numeric_fused_head_resolution": 13.3333, "predict_exact_match_for_task760_msr_sqa_long_text_generation": 0.0, "predict_rouge1_for_task760_msr_sqa_long_text_generation": 0.8103, "predict_rougeL_for_task760_msr_sqa_long_text_generation": 0.8103, "predict_f1_for_task760_msr_sqa_long_text_generation": 0.7946, "predict_exact_match_for_task035_winogrande_question_modification_person": 0.0, "predict_rouge1_for_task035_winogrande_question_modification_person": 58.9922, "predict_rougeL_for_task035_winogrande_question_modification_person": 58.9922, "predict_f1_for_task035_winogrande_question_modification_person": 58.9922, "predict_exact_match_for_task569_recipe_nlg_text_generation": 0.0, "predict_rouge1_for_task569_recipe_nlg_text_generation": 24.2424, "predict_rougeL_for_task569_recipe_nlg_text_generation": 20.6061, "predict_f1_for_task569_recipe_nlg_text_generation": 20.6061, 
"predict_exact_match_for_task391_causal_relationship": 40.0, "predict_rouge1_for_task391_causal_relationship": 80.0, "predict_rougeL_for_task391_causal_relationship": 80.0, "predict_f1_for_task391_causal_relationship": 80.0, "predict_exact_match_for_task891_gap_coreference_resolution": 40.0, "predict_rouge1_for_task891_gap_coreference_resolution": 50.0, "predict_rougeL_for_task891_gap_coreference_resolution": 50.0, "predict_f1_for_task891_gap_coreference_resolution": 50.0, "predict_exact_match_for_task1586_scifact_title_generation": 0.0, "predict_rouge1_for_task1586_scifact_title_generation": 17.3747, "predict_rougeL_for_task1586_scifact_title_generation": 17.3747, "predict_f1_for_task1586_scifact_title_generation": 16.1509, "predict_exact_match_for_task602_wikitext-103_answer_generation": 0.0, "predict_rouge1_for_task602_wikitext-103_answer_generation": 0.0, "predict_rougeL_for_task602_wikitext-103_answer_generation": 0.0, "predict_f1_for_task602_wikitext-103_answer_generation": 0.0, "predict_exact_match_for_task1195_disflqa_disfluent_to_fluent_conversion": 0.0, "predict_rouge1_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873, "predict_rougeL_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873, "predict_f1_for_task1195_disflqa_disfluent_to_fluent_conversion": 62.1678, "predict_exact_match_for_task1409_dart_text_generation": 0.0, "predict_rouge1_for_task1409_dart_text_generation": 41.9306, "predict_rougeL_for_task1409_dart_text_generation": 36.5007, "predict_f1_for_task1409_dart_text_generation": 39.9284, "predict_exact_match_for_task033_winogrande_answer_generation": 40.0, "predict_rouge1_for_task033_winogrande_answer_generation": 66.6667, "predict_rougeL_for_task033_winogrande_answer_generation": 66.6667, "predict_f1_for_task033_winogrande_answer_generation": 66.6667, "predict_exact_match_for_task1407_dart_question_generation": 0.0, "predict_rouge1_for_task1407_dart_question_generation": 24.086, "predict_rougeL_for_task1407_dart_question_generation": 20.8043, "predict_f1_for_task1407_dart_question_generation": 23.7903, "predict_exact_match_for_task402_grailqa_paraphrase_generation": 0.0, "predict_rouge1_for_task402_grailqa_paraphrase_generation": 60.8498, "predict_rougeL_for_task402_grailqa_paraphrase_generation": 55.8974, "predict_f1_for_task402_grailqa_paraphrase_generation": 58.1832, "predict_exact_match_for_task201_mnli_neutral_classification": 0.0, "predict_rouge1_for_task201_mnli_neutral_classification": 0.0, "predict_rougeL_for_task201_mnli_neutral_classification": 0.0, "predict_f1_for_task201_mnli_neutral_classification": 0.0, "predict_exact_match_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_rouge1_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_rougeL_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_f1_for_task520_aquamuse_answer_given_in_passage": 100.0, "predict_exact_match_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_rouge1_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_rougeL_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_f1_for_task892_gap_reverse_coreference_resolution": 40.0, "predict_exact_match_for_task828_copa_commonsense_cause_effect": 60.0, "predict_rouge1_for_task828_copa_commonsense_cause_effect": 60.0, "predict_rougeL_for_task828_copa_commonsense_cause_effect": 60.0, "predict_f1_for_task828_copa_commonsense_cause_effect": 60.0, "predict_exact_match_for_task769_qed_summarization": 100.0, "predict_rouge1_for_task769_qed_summarization": 100.0, 
"predict_rougeL_for_task769_qed_summarization": 100.0, "predict_f1_for_task769_qed_summarization": 100.0, "predict_exact_match_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_rouge1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_rougeL_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_f1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0, "predict_exact_match_for_task1385_anli_r1_entailment": 0.0, "predict_rouge1_for_task1385_anli_r1_entailment": 20.0, "predict_rougeL_for_task1385_anli_r1_entailment": 20.0, "predict_f1_for_task1385_anli_r1_entailment": 0.0, "predict_exact_match_for_task1531_daily_dialog_type_classification": 40.0, "predict_rouge1_for_task1531_daily_dialog_type_classification": 40.0, "predict_rougeL_for_task1531_daily_dialog_type_classification": 40.0, "predict_f1_for_task1531_daily_dialog_type_classification": 40.0, "predict_exact_match_for_task1516_imppres_naturallanguageinference": 20.0, "predict_rouge1_for_task1516_imppres_naturallanguageinference": 20.0, "predict_rougeL_for_task1516_imppres_naturallanguageinference": 20.0, "predict_f1_for_task1516_imppres_naturallanguageinference": 20.0, "predict_exact_match_for_task1394_meta_woz_task_classification": 80.0, "predict_rouge1_for_task1394_meta_woz_task_classification": 93.3333, "predict_rougeL_for_task1394_meta_woz_task_classification": 93.3333, "predict_f1_for_task1394_meta_woz_task_classification": 80.0, "predict_exact_match_for_task401_numeric_fused_head_reference": 20.0, "predict_rouge1_for_task401_numeric_fused_head_reference": 43.3333, "predict_rougeL_for_task401_numeric_fused_head_reference": 43.3333, "predict_f1_for_task401_numeric_fused_head_reference": 43.3333, "predict_exact_match_for_task1598_nyc_long_text_generation": 0.0, "predict_rouge1_for_task1598_nyc_long_text_generation": 27.0, "predict_rougeL_for_task1598_nyc_long_text_generation": 25.8889, "predict_f1_for_task1598_nyc_long_text_generation": 27.3109, "predict_exact_match_for_task1615_sick_tclassify_b_relation_a": 60.0, "predict_rouge1_for_task1615_sick_tclassify_b_relation_a": 86.6667, "predict_rougeL_for_task1615_sick_tclassify_b_relation_a": 86.6667, "predict_f1_for_task1615_sick_tclassify_b_relation_a": 60.0, "predict_exact_match_for_task970_sherliic_causal_relationship": 100.0, "predict_rouge1_for_task970_sherliic_causal_relationship": 100.0, "predict_rougeL_for_task970_sherliic_causal_relationship": 100.0, "predict_f1_for_task970_sherliic_causal_relationship": 100.0, "predict_exact_match_for_task1390_wscfixed_coreference": 60.0, "predict_rouge1_for_task1390_wscfixed_coreference": 60.0, "predict_rougeL_for_task1390_wscfixed_coreference": 60.0, "predict_f1_for_task1390_wscfixed_coreference": 60.0, "predict_exact_match_for_task199_mnli_classification": 100.0, "predict_rouge1_for_task199_mnli_classification": 100.0, "predict_rougeL_for_task199_mnli_classification": 100.0, "predict_f1_for_task199_mnli_classification": 100.0, "predict_exact_match_for_task034_winogrande_question_modification_object": 0.0, "predict_rouge1_for_task034_winogrande_question_modification_object": 61.7974, "predict_rougeL_for_task034_winogrande_question_modification_object": 61.7974, "predict_f1_for_task034_winogrande_question_modification_object": 61.7974, "predict_exact_match_for_task133_winowhy_reason_plausibility_detection": 20.0, "predict_rouge1_for_task133_winowhy_reason_plausibility_detection": 20.0, "predict_rougeL_for_task133_winowhy_reason_plausibility_detection": 20.0, 
"predict_f1_for_task133_winowhy_reason_plausibility_detection": 20.0, "predict_exact_match_for_task226_english_language_answer_relevance_classification": 40.0, "predict_rouge1_for_task226_english_language_answer_relevance_classification": 40.0, "predict_rougeL_for_task226_english_language_answer_relevance_classification": 40.0, "predict_f1_for_task226_english_language_answer_relevance_classification": 40.0, "predict_exact_match_for_task510_reddit_tifu_title_summarization": 0.0, "predict_rouge1_for_task510_reddit_tifu_title_summarization": 39.811, "predict_rougeL_for_task510_reddit_tifu_title_summarization": 39.811, "predict_f1_for_task510_reddit_tifu_title_summarization": 38.265, "predict_exact_match_for_task935_defeasible_nli_atomic_classification": 40.0, "predict_rouge1_for_task935_defeasible_nli_atomic_classification": 60.0, "predict_rougeL_for_task935_defeasible_nli_atomic_classification": 60.0, "predict_f1_for_task935_defeasible_nli_atomic_classification": 40.0, "predict_exact_match_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_rouge1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_rougeL_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_f1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0, "predict_exact_match_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_rouge1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_rougeL_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_f1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0, "predict_exact_match_for_task937_defeasible_nli_social_classification": 20.0, "predict_rouge1_for_task937_defeasible_nli_social_classification": 20.0, "predict_rougeL_for_task937_defeasible_nli_social_classification": 20.0, "predict_f1_for_task937_defeasible_nli_social_classification": 20.0, "predict_exact_match_for_task743_eurlex_summarization": 0.0, "predict_rouge1_for_task743_eurlex_summarization": 31.4423, "predict_rougeL_for_task743_eurlex_summarization": 26.0256, "predict_f1_for_task743_eurlex_summarization": 27.1719, "predict_exact_match_for_task1388_cb_entailment": 0.0, "predict_rouge1_for_task1388_cb_entailment": 40.0, "predict_rougeL_for_task1388_cb_entailment": 40.0, "predict_f1_for_task1388_cb_entailment": 0.0, "predict_exact_match_for_task671_ambigqa_text_generation": 0.0, "predict_rouge1_for_task671_ambigqa_text_generation": 62.4536, "predict_rougeL_for_task671_ambigqa_text_generation": 59.3766, "predict_f1_for_task671_ambigqa_text_generation": 62.4536, "predict_exact_match_for_task121_zest_text_modification": 0.0, "predict_rouge1_for_task121_zest_text_modification": 47.5745, "predict_rougeL_for_task121_zest_text_modification": 41.2253, "predict_f1_for_task121_zest_text_modification": 42.083, "predict_exact_match_for_task1345_glue_qqp_question_paraprashing": 0.0, "predict_rouge1_for_task1345_glue_qqp_question_paraprashing": 21.3333, "predict_rougeL_for_task1345_glue_qqp_question_paraprashing": 21.3333, "predict_f1_for_task1345_glue_qqp_question_paraprashing": 21.6667, "predict_exact_match_for_task330_gap_answer_generation": 80.0, "predict_rouge1_for_task330_gap_answer_generation": 97.1429, "predict_rougeL_for_task330_gap_answer_generation": 97.1429, "predict_f1_for_task330_gap_answer_generation": 96.0, "predict_exact_match_for_task1342_amazon_us_reviews_title": 0.0, 
"predict_rouge1_for_task1342_amazon_us_reviews_title": 8.9697, "predict_rougeL_for_task1342_amazon_us_reviews_title": 6.303, "predict_f1_for_task1342_amazon_us_reviews_title": 8.9697, "predict_exact_match_for_task329_gap_classification": 60.0, "predict_rouge1_for_task329_gap_classification": 60.0, "predict_rougeL_for_task329_gap_classification": 60.0, "predict_f1_for_task329_gap_classification": 60.0, "predict_exact_match_for_task281_points_of_correspondence": 0.0, "predict_rouge1_for_task281_points_of_correspondence": 26.0813, "predict_rougeL_for_task281_points_of_correspondence": 25.248, "predict_f1_for_task281_points_of_correspondence": 27.6283, "predict_exact_match_for_task036_qasc_topic_word_to_generate_related_fact": 40.0, "predict_rouge1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0, "predict_rougeL_for_task036_qasc_topic_word_to_generate_related_fact": 66.0, "predict_f1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0, "predict_exact_match_for_task1554_scitail_classification": 40.0, "predict_rouge1_for_task1554_scitail_classification": 40.0, "predict_rougeL_for_task1554_scitail_classification": 40.0, "predict_f1_for_task1554_scitail_classification": 40.0, "predict_exact_match_for_task050_multirc_answerability": 100.0, "predict_rouge1_for_task050_multirc_answerability": 100.0, "predict_rougeL_for_task050_multirc_answerability": 100.0, "predict_f1_for_task050_multirc_answerability": 100.0, "predict_exact_match_for_task362_spolin_yesand_prompt_response_sub_classification": 20.0, "predict_rouge1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0, "predict_rougeL_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0, "predict_f1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0, "predict_exact_match_for_task1557_jfleg_answer_generation": 20.0, "predict_rouge1_for_task1557_jfleg_answer_generation": 73.7928, "predict_rougeL_for_task1557_jfleg_answer_generation": 73.7928, "predict_f1_for_task1557_jfleg_answer_generation": 71.7726, "predict_exact_match_for_task249_enhanced_wsc_pronoun_disambiguation": 20.0, "predict_rouge1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333, "predict_rougeL_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333, "predict_f1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333, "predict_exact_match_for_task957_e2e_nlg_text_generation_generate": 0.0, "predict_rouge1_for_task957_e2e_nlg_text_generation_generate": 52.5426, "predict_rougeL_for_task957_e2e_nlg_text_generation_generate": 43.0283, "predict_f1_for_task957_e2e_nlg_text_generation_generate": 52.8436, "predict_exact_match_for_task418_persent_title_generation": 0.0, "predict_rouge1_for_task418_persent_title_generation": 15.3458, "predict_rougeL_for_task418_persent_title_generation": 10.6399, "predict_f1_for_task418_persent_title_generation": 15.3458, "predict_exact_match_for_task614_glucose_cause_event_detection": 0.0, "predict_rouge1_for_task614_glucose_cause_event_detection": 20.673, "predict_rougeL_for_task614_glucose_cause_event_detection": 20.673, "predict_f1_for_task614_glucose_cause_event_detection": 22.7647, "predict_exact_match_for_task677_ollie_sentence_answer_generation": 0.0, "predict_rouge1_for_task677_ollie_sentence_answer_generation": 28.5268, "predict_rougeL_for_task677_ollie_sentence_answer_generation": 28.5268, "predict_f1_for_task677_ollie_sentence_answer_generation": 27.0794, "predict_exact_match_for_task220_rocstories_title_classification": 100.0, 
"predict_rouge1_for_task220_rocstories_title_classification": 100.0, "predict_rougeL_for_task220_rocstories_title_classification": 100.0, "predict_f1_for_task220_rocstories_title_classification": 100.0, "predict_exact_match_for_task1631_openpi_answer_generation": 0.0, "predict_rouge1_for_task1631_openpi_answer_generation": 77.0305, "predict_rougeL_for_task1631_openpi_answer_generation": 74.5305, "predict_f1_for_task1631_openpi_answer_generation": 77.0305, "predict_exact_match_for_task232_iirc_link_number_classification": 60.0, "predict_rouge1_for_task232_iirc_link_number_classification": 60.0, "predict_rougeL_for_task232_iirc_link_number_classification": 60.0, "predict_f1_for_task232_iirc_link_number_classification": 60.0, "predict_exact_match_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_rouge1_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_rougeL_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_f1_for_task1391_winogrande_easy_answer_generation": 100.0, "predict_exact_match_for_task1358_xlsum_title_generation": 0.0, "predict_rouge1_for_task1358_xlsum_title_generation": 26.563, "predict_rougeL_for_task1358_xlsum_title_generation": 24.2101, "predict_f1_for_task1358_xlsum_title_generation": 27.1161, "predict_exact_match_for_task1533_daily_dialog_formal_classification": 60.0, "predict_rouge1_for_task1533_daily_dialog_formal_classification": 60.0, "predict_rougeL_for_task1533_daily_dialog_formal_classification": 60.0, "predict_f1_for_task1533_daily_dialog_formal_classification": 60.0, "predict_exact_match_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_rouge1_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_rougeL_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_f1_for_task1156_bard_analogical_reasoning_tools": 40.0, "predict_exact_match_for_task1659_title_generation": 0.0, "predict_rouge1_for_task1659_title_generation": 27.5046, "predict_rougeL_for_task1659_title_generation": 22.3799, "predict_f1_for_task1659_title_generation": 25.0379, "predict_exact_match_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_rouge1_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_rougeL_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_f1_for_task1624_disfl_qa_question_yesno_classification": 60.0, "predict_exact_match_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_rouge1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_rougeL_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_f1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0, "predict_exact_match_for_task827_copa_commonsense_reasoning": 80.0, "predict_rouge1_for_task827_copa_commonsense_reasoning": 80.0, "predict_rougeL_for_task827_copa_commonsense_reasoning": 80.0, "predict_f1_for_task827_copa_commonsense_reasoning": 80.0, "predict_exact_match_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_rouge1_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_rougeL_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_f1_for_task1153_bard_analogical_reasoning_affordance": 20.0, "predict_exact_match_for_task393_plausible_result_generation": 0.0, "predict_rouge1_for_task393_plausible_result_generation": 38.1587, "predict_rougeL_for_task393_plausible_result_generation": 38.1587, "predict_f1_for_task393_plausible_result_generation": 38.1587, 
"predict_exact_match_for_task879_schema_guided_dstc8_classification": 100.0, "predict_rouge1_for_task879_schema_guided_dstc8_classification": 100.0, "predict_rougeL_for_task879_schema_guided_dstc8_classification": 100.0, "predict_f1_for_task879_schema_guided_dstc8_classification": 100.0, "predict_exact_match_for_task613_politifact_text_generation": 0.0, "predict_rouge1_for_task613_politifact_text_generation": 33.3333, "predict_rougeL_for_task613_politifact_text_generation": 33.3333, "predict_f1_for_task613_politifact_text_generation": 13.3333, "predict_exact_match_for_task219_rocstories_title_answer_generation": 0.0, "predict_rouge1_for_task219_rocstories_title_answer_generation": 20.7143, "predict_rougeL_for_task219_rocstories_title_answer_generation": 20.7143, "predict_f1_for_task219_rocstories_title_answer_generation": 15.7143, "predict_exact_match_for_task190_snli_classification": 40.0, "predict_rouge1_for_task190_snli_classification": 40.0, "predict_rougeL_for_task190_snli_classification": 40.0, "predict_f1_for_task190_snli_classification": 40.0, "predict_exact_match_for_task200_mnli_entailment_classification": 80.0, "predict_rouge1_for_task200_mnli_entailment_classification": 80.0, "predict_rougeL_for_task200_mnli_entailment_classification": 80.0, "predict_f1_for_task200_mnli_entailment_classification": 80.0, "predict_exact_match_for_task1534_daily_dialog_question_classification": 40.0, "predict_rouge1_for_task1534_daily_dialog_question_classification": 40.0, "predict_rougeL_for_task1534_daily_dialog_question_classification": 40.0, "predict_f1_for_task1534_daily_dialog_question_classification": 40.0, "predict_exact_match_for_task1540_parsed_pdfs_summarization": 0.0, "predict_rouge1_for_task1540_parsed_pdfs_summarization": 28.0519, "predict_rougeL_for_task1540_parsed_pdfs_summarization": 28.0519, "predict_f1_for_task1540_parsed_pdfs_summarization": 25.1948, "predict_exact_match_for_task442_com_qa_paraphrase_question_generation": 0.0, "predict_rouge1_for_task442_com_qa_paraphrase_question_generation": 66.8333, "predict_rougeL_for_task442_com_qa_paraphrase_question_generation": 64.3333, "predict_f1_for_task442_com_qa_paraphrase_question_generation": 64.9744, "predict_exact_match_for_task392_inverse_causal_relationship": 80.0, "predict_rouge1_for_task392_inverse_causal_relationship": 93.3333, "predict_rougeL_for_task392_inverse_causal_relationship": 93.3333, "predict_f1_for_task392_inverse_causal_relationship": 93.3333, "predict_exact_match_for_task1562_zest_text_modification": 0.0, "predict_rouge1_for_task1562_zest_text_modification": 47.5095, "predict_rougeL_for_task1562_zest_text_modification": 38.5095, "predict_f1_for_task1562_zest_text_modification": 45.6913, "predict_exact_match_for_task640_esnli_classification": 40.0, "predict_rouge1_for_task640_esnli_classification": 40.0, "predict_rougeL_for_task640_esnli_classification": 40.0, "predict_f1_for_task640_esnli_classification": 40.0, "predict_exact_match_for_task1622_disfl_qa_text_modication": 0.0, "predict_rouge1_for_task1622_disfl_qa_text_modication": 66.5018, "predict_rougeL_for_task1622_disfl_qa_text_modication": 57.2711, "predict_f1_for_task1622_disfl_qa_text_modication": 64.3711, "predict_exact_match_for_task623_ohsumed_yes_no_answer_generation": 80.0, "predict_rouge1_for_task623_ohsumed_yes_no_answer_generation": 80.0, "predict_rougeL_for_task623_ohsumed_yes_no_answer_generation": 80.0, "predict_f1_for_task623_ohsumed_yes_no_answer_generation": 80.0, "predict_exact_match_for_task020_mctaco_span_based_question": 40.0, 
"predict_rouge1_for_task020_mctaco_span_based_question": 40.0, "predict_rougeL_for_task020_mctaco_span_based_question": 40.0, "predict_f1_for_task020_mctaco_span_based_question": 40.0, "predict_exact_match_for_task642_esnli_classification": 40.0, "predict_rouge1_for_task642_esnli_classification": 40.0, "predict_rougeL_for_task642_esnli_classification": 40.0, "predict_f1_for_task642_esnli_classification": 40.0, "predict_exact_match_for_task102_commongen_sentence_generation": 0.0, "predict_rouge1_for_task102_commongen_sentence_generation": 60.8462, "predict_rougeL_for_task102_commongen_sentence_generation": 47.6154, "predict_f1_for_task102_commongen_sentence_generation": 49.0256, "predict_exact_match_for_title_generation": 11.1111, "predict_rouge1_for_title_generation": 30.5218, "predict_rougeL_for_title_generation": 28.8976, "predict_f1_for_title_generation": 27.9279, "predict_exact_match_for_coreference_resolution": 45.7143, "predict_rouge1_for_coreference_resolution": 56.9388, "predict_rougeL_for_coreference_resolution": 56.9388, "predict_f1_for_coreference_resolution": 56.8571, "predict_exact_match_for_textual_entailment": 40.8333, "predict_rouge1_for_textual_entailment": 47.7778, "predict_rougeL_for_textual_entailment": 47.7778, "predict_f1_for_textual_entailment": 40.8333, "predict_exact_match_for_question_rewriting": 0.0, "predict_rouge1_for_question_rewriting": 57.6828, "predict_rougeL_for_question_rewriting": 54.491, "predict_f1_for_question_rewriting": 56.1871, "predict_exact_match_for_cause_effect_classification": 51.4286, "predict_rouge1_for_cause_effect_classification": 67.4521, "predict_rougeL_for_cause_effect_classification": 67.4521, "predict_f1_for_cause_effect_classification": 67.751, "predict_exact_match_for_dialogue_act_recognition": 51.4286, "predict_rouge1_for_dialogue_act_recognition": 61.9048, "predict_rougeL_for_dialogue_act_recognition": 61.9048, "predict_f1_for_dialogue_act_recognition": 54.2857, "predict_exact_match_for_answerability_classification": 67.6923, "predict_rouge1_for_answerability_classification": 69.7436, "predict_rougeL_for_answerability_classification": 69.7436, "predict_f1_for_answerability_classification": 69.7436, "predict_exact_match_for_keyword_tagging": 44.0, "predict_rouge1_for_keyword_tagging": 64.1333, "predict_rougeL_for_keyword_tagging": 64.1333, "predict_f1_for_keyword_tagging": 60.1333, "predict_exact_match_for_data_to_text": 0.0, "predict_rouge1_for_data_to_text": 39.3261, "predict_rougeL_for_data_to_text": 34.4403, "predict_f1_for_data_to_text": 37.7596, "predict_exact_match_for_word_analogy": 47.5, "predict_rouge1_for_word_analogy": 47.5, "predict_rougeL_for_word_analogy": 47.5, "predict_f1_for_word_analogy": 47.5, "predict_exact_match_for_overlap_extraction": 10.0, "predict_rouge1_for_overlap_extraction": 23.0406, "predict_rougeL_for_overlap_extraction": 22.624, "predict_f1_for_overlap_extraction": 23.8141, "predict_exact_match_for_grammar_error_correction": 20.0, "predict_rouge1_for_grammar_error_correction": 73.7928, "predict_rougeL_for_grammar_error_correction": 73.7928, "predict_f1_for_grammar_error_correction": 71.7726, "predict_gen_len": 4.8739, "predict_global_step": 17980, "predict_runtime": 99.8176, "predict_samples_per_second": 5.961, "predict_steps_per_second": 0.1, "predict_samples": 595}
predict_eval_predictions.jsonl ADDED
The diff for this file is too large to render. See raw diff
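
Per its extension, predict_eval_predictions.jsonl holds one JSON record per line, so it can be inspected without loading the whole file. A minimal sketch under that assumption; the per-record field names are not shown in this commit, so the actual schema must be checked against the raw file:

import json

# Stream the predictions file record by record rather than reading it whole.
n_records = 0
with open("predict_eval_predictions.jsonl") as f:
    for line in f:
        if not line.strip():
            continue  # tolerate blank lines
        record = json.loads(line)  # one JSON object per line (assumed framing)
        n_records += 1

# This count would be expected to line up with predict_samples (595) above.
print(n_records)
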
 
predict_results.json ADDED
@@ -0,0 +1,538 @@
+ {
+ "epoch": 2.0,
+ "predict_exact_match": 34.1176,
+ "predict_exact_match_for_answerability_classification": 67.6923,
+ "predict_exact_match_for_cause_effect_classification": 51.4286,
+ "predict_exact_match_for_coreference_resolution": 45.7143,
+ "predict_exact_match_for_data_to_text": 0.0,
+ "predict_exact_match_for_dialogue_act_recognition": 51.4286,
+ "predict_exact_match_for_grammar_error_correction": 20.0,
+ "predict_exact_match_for_keyword_tagging": 44.0,
+ "predict_exact_match_for_overlap_extraction": 10.0,
+ "predict_exact_match_for_question_rewriting": 0.0,
+ "predict_exact_match_for_task020_mctaco_span_based_question": 40.0,
+ "predict_exact_match_for_task033_winogrande_answer_generation": 40.0,
+ "predict_exact_match_for_task034_winogrande_question_modification_object": 0.0,
+ "predict_exact_match_for_task035_winogrande_question_modification_person": 0.0,
+ "predict_exact_match_for_task036_qasc_topic_word_to_generate_related_fact": 40.0,
+ "predict_exact_match_for_task039_qasc_find_overlapping_words": 20.0,
+ "predict_exact_match_for_task050_multirc_answerability": 100.0,
+ "predict_exact_match_for_task102_commongen_sentence_generation": 0.0,
+ "predict_exact_match_for_task1152_bard_analogical_reasoning_causation": 20.0,
+ "predict_exact_match_for_task1153_bard_analogical_reasoning_affordance": 20.0,
+ "predict_exact_match_for_task1154_bard_analogical_reasoning_travel": 40.0,
+ "predict_exact_match_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
+ "predict_exact_match_for_task1156_bard_analogical_reasoning_tools": 40.0,
+ "predict_exact_match_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
+ "predict_exact_match_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
+ "predict_exact_match_for_task1159_bard_analogical_reasoning_containers": 60.0,
+ "predict_exact_match_for_task1161_coda19_title_generation": 0.0,
+ "predict_exact_match_for_task1195_disflqa_disfluent_to_fluent_conversion": 0.0,
+ "predict_exact_match_for_task121_zest_text_modification": 0.0,
+ "predict_exact_match_for_task133_winowhy_reason_plausibility_detection": 20.0,
+ "predict_exact_match_for_task1342_amazon_us_reviews_title": 0.0,
+ "predict_exact_match_for_task1344_glue_entailment_classification": 40.0,
+ "predict_exact_match_for_task1345_glue_qqp_question_paraprashing": 0.0,
+ "predict_exact_match_for_task1356_xlsum_title_generation": 0.0,
+ "predict_exact_match_for_task1358_xlsum_title_generation": 0.0,
+ "predict_exact_match_for_task1385_anli_r1_entailment": 0.0,
+ "predict_exact_match_for_task1386_anli_r2_entailment": 0.0,
+ "predict_exact_match_for_task1387_anli_r3_entailment": 0.0,
+ "predict_exact_match_for_task1388_cb_entailment": 0.0,
+ "predict_exact_match_for_task1390_wscfixed_coreference": 60.0,
+ "predict_exact_match_for_task1391_winogrande_easy_answer_generation": 100.0,
+ "predict_exact_match_for_task1393_superglue_copa_text_completion": 100.0,
+ "predict_exact_match_for_task1394_meta_woz_task_classification": 80.0,
+ "predict_exact_match_for_task1407_dart_question_generation": 0.0,
+ "predict_exact_match_for_task1409_dart_text_generation": 0.0,
+ "predict_exact_match_for_task1439_doqa_cooking_isanswerable": 40.0,
+ "predict_exact_match_for_task1442_doqa_movies_isanswerable": 80.0,
+ "predict_exact_match_for_task1516_imppres_naturallanguageinference": 20.0,
+ "predict_exact_match_for_task1529_scitail1.1_classification": 40.0,
+ "predict_exact_match_for_task1531_daily_dialog_type_classification": 40.0,
+ "predict_exact_match_for_task1533_daily_dialog_formal_classification": 60.0,
+ "predict_exact_match_for_task1534_daily_dialog_question_classification": 40.0,
+ "predict_exact_match_for_task1540_parsed_pdfs_summarization": 0.0,
+ "predict_exact_match_for_task1554_scitail_classification": 40.0,
+ "predict_exact_match_for_task1557_jfleg_answer_generation": 20.0,
+ "predict_exact_match_for_task1562_zest_text_modification": 0.0,
+ "predict_exact_match_for_task1586_scifact_title_generation": 0.0,
+ "predict_exact_match_for_task1598_nyc_long_text_generation": 0.0,
+ "predict_exact_match_for_task1612_sick_label_classification": 40.0,
+ "predict_exact_match_for_task1615_sick_tclassify_b_relation_a": 60.0,
+ "predict_exact_match_for_task1622_disfl_qa_text_modication": 0.0,
+ "predict_exact_match_for_task1624_disfl_qa_question_yesno_classification": 60.0,
+ "predict_exact_match_for_task1631_openpi_answer_generation": 0.0,
+ "predict_exact_match_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
+ "predict_exact_match_for_task1659_title_generation": 0.0,
+ "predict_exact_match_for_task1664_winobias_text_generation": 60.0,
+ "predict_exact_match_for_task1728_web_nlg_data_to_text": 0.0,
+ "predict_exact_match_for_task190_snli_classification": 40.0,
+ "predict_exact_match_for_task199_mnli_classification": 100.0,
+ "predict_exact_match_for_task200_mnli_entailment_classification": 80.0,
+ "predict_exact_match_for_task201_mnli_neutral_classification": 0.0,
+ "predict_exact_match_for_task202_mnli_contradiction_classification": 80.0,
+ "predict_exact_match_for_task219_rocstories_title_answer_generation": 0.0,
+ "predict_exact_match_for_task220_rocstories_title_classification": 100.0,
+ "predict_exact_match_for_task226_english_language_answer_relevance_classification": 40.0,
+ "predict_exact_match_for_task232_iirc_link_number_classification": 60.0,
+ "predict_exact_match_for_task233_iirc_link_exists_classification": 40.0,
+ "predict_exact_match_for_task242_tweetqa_classification": 80.0,
+ "predict_exact_match_for_task249_enhanced_wsc_pronoun_disambiguation": 20.0,
+ "predict_exact_match_for_task281_points_of_correspondence": 0.0,
+ "predict_exact_match_for_task288_gigaword_summarization": 0.0,
+ "predict_exact_match_for_task290_tellmewhy_question_answerability": 60.0,
+ "predict_exact_match_for_task304_numeric_fused_head_resolution": 0.0,
+ "predict_exact_match_for_task329_gap_classification": 60.0,
+ "predict_exact_match_for_task330_gap_answer_generation": 80.0,
+ "predict_exact_match_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
+ "predict_exact_match_for_task362_spolin_yesand_prompt_response_sub_classification": 20.0,
+ "predict_exact_match_for_task391_causal_relationship": 40.0,
+ "predict_exact_match_for_task392_inverse_causal_relationship": 80.0,
+ "predict_exact_match_for_task393_plausible_result_generation": 0.0,
+ "predict_exact_match_for_task401_numeric_fused_head_reference": 20.0,
+ "predict_exact_match_for_task402_grailqa_paraphrase_generation": 0.0,
+ "predict_exact_match_for_task418_persent_title_generation": 0.0,
+ "predict_exact_match_for_task442_com_qa_paraphrase_question_generation": 0.0,
+ "predict_exact_match_for_task500_scruples_anecdotes_title_generation": 0.0,
+ "predict_exact_match_for_task510_reddit_tifu_title_summarization": 0.0,
+ "predict_exact_match_for_task520_aquamuse_answer_given_in_passage": 100.0,
+ "predict_exact_match_for_task569_recipe_nlg_text_generation": 0.0,
+ "predict_exact_match_for_task602_wikitext-103_answer_generation": 0.0,
+ "predict_exact_match_for_task613_politifact_text_generation": 0.0,
+ "predict_exact_match_for_task614_glucose_cause_event_detection": 0.0,
+ "predict_exact_match_for_task619_ohsumed_abstract_title_generation": 0.0,
+ "predict_exact_match_for_task620_ohsumed_medical_subject_headings_answer_generation": 0.0,
+ "predict_exact_match_for_task623_ohsumed_yes_no_answer_generation": 80.0,
+ "predict_exact_match_for_task640_esnli_classification": 40.0,
+ "predict_exact_match_for_task641_esnli_classification": 60.0,
+ "predict_exact_match_for_task642_esnli_classification": 40.0,
+ "predict_exact_match_for_task645_summarization": 100.0,
+ "predict_exact_match_for_task648_answer_generation": 0.0,
+ "predict_exact_match_for_task670_ambigqa_question_generation": 0.0,
+ "predict_exact_match_for_task671_ambigqa_text_generation": 0.0,
+ "predict_exact_match_for_task677_ollie_sentence_answer_generation": 0.0,
+ "predict_exact_match_for_task738_perspectrum_classification": 60.0,
+ "predict_exact_match_for_task743_eurlex_summarization": 0.0,
+ "predict_exact_match_for_task760_msr_sqa_long_text_generation": 0.0,
+ "predict_exact_match_for_task769_qed_summarization": 100.0,
+ "predict_exact_match_for_task827_copa_commonsense_reasoning": 80.0,
+ "predict_exact_match_for_task828_copa_commonsense_cause_effect": 60.0,
+ "predict_exact_match_for_task879_schema_guided_dstc8_classification": 100.0,
+ "predict_exact_match_for_task880_schema_guided_dstc8_classification": 20.0,
+ "predict_exact_match_for_task890_gcwd_classification": 40.0,
+ "predict_exact_match_for_task891_gap_coreference_resolution": 40.0,
+ "predict_exact_match_for_task892_gap_reverse_coreference_resolution": 40.0,
+ "predict_exact_match_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
+ "predict_exact_match_for_task935_defeasible_nli_atomic_classification": 40.0,
+ "predict_exact_match_for_task936_defeasible_nli_snli_classification": 40.0,
+ "predict_exact_match_for_task937_defeasible_nli_social_classification": 20.0,
+ "predict_exact_match_for_task957_e2e_nlg_text_generation_generate": 0.0,
+ "predict_exact_match_for_task970_sherliic_causal_relationship": 100.0,
+ "predict_exact_match_for_textual_entailment": 40.8333,
+ "predict_exact_match_for_title_generation": 11.1111,
+ "predict_exact_match_for_word_analogy": 47.5,
+ "predict_f1": 48.7192,
+ "predict_f1_for_answerability_classification": 69.7436,
+ "predict_f1_for_cause_effect_classification": 67.751,
+ "predict_f1_for_coreference_resolution": 56.8571,
+ "predict_f1_for_data_to_text": 37.7596,
+ "predict_f1_for_dialogue_act_recognition": 54.2857,
+ "predict_f1_for_grammar_error_correction": 71.7726,
+ "predict_f1_for_keyword_tagging": 60.1333,
+ "predict_f1_for_overlap_extraction": 23.8141,
+ "predict_f1_for_question_rewriting": 56.1871,
+ "predict_f1_for_task020_mctaco_span_based_question": 40.0,
+ "predict_f1_for_task033_winogrande_answer_generation": 66.6667,
+ "predict_f1_for_task034_winogrande_question_modification_object": 61.7974,
+ "predict_f1_for_task035_winogrande_question_modification_person": 58.9922,
+ "predict_f1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0,
+ "predict_f1_for_task039_qasc_find_overlapping_words": 20.0,
+ "predict_f1_for_task050_multirc_answerability": 100.0,
+ "predict_f1_for_task102_commongen_sentence_generation": 49.0256,
+ "predict_f1_for_task1152_bard_analogical_reasoning_causation": 20.0,
+ "predict_f1_for_task1153_bard_analogical_reasoning_affordance": 20.0,
+ "predict_f1_for_task1154_bard_analogical_reasoning_travel": 40.0,
+ "predict_f1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
+ "predict_f1_for_task1156_bard_analogical_reasoning_tools": 40.0,
+ "predict_f1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
+ "predict_f1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
+ "predict_f1_for_task1159_bard_analogical_reasoning_containers": 60.0,
+ "predict_f1_for_task1161_coda19_title_generation": 19.4872,
+ "predict_f1_for_task1195_disflqa_disfluent_to_fluent_conversion": 62.1678,
+ "predict_f1_for_task121_zest_text_modification": 42.083,
+ "predict_f1_for_task133_winowhy_reason_plausibility_detection": 20.0,
+ "predict_f1_for_task1342_amazon_us_reviews_title": 8.9697,
+ "predict_f1_for_task1344_glue_entailment_classification": 40.0,
+ "predict_f1_for_task1345_glue_qqp_question_paraprashing": 21.6667,
+ "predict_f1_for_task1356_xlsum_title_generation": 16.0,
+ "predict_f1_for_task1358_xlsum_title_generation": 27.1161,
+ "predict_f1_for_task1385_anli_r1_entailment": 0.0,
+ "predict_f1_for_task1386_anli_r2_entailment": 0.0,
+ "predict_f1_for_task1387_anli_r3_entailment": 0.0,
+ "predict_f1_for_task1388_cb_entailment": 0.0,
+ "predict_f1_for_task1390_wscfixed_coreference": 60.0,
+ "predict_f1_for_task1391_winogrande_easy_answer_generation": 100.0,
+ "predict_f1_for_task1393_superglue_copa_text_completion": 100.0,
+ "predict_f1_for_task1394_meta_woz_task_classification": 80.0,
+ "predict_f1_for_task1407_dart_question_generation": 23.7903,
+ "predict_f1_for_task1409_dart_text_generation": 39.9284,
+ "predict_f1_for_task1439_doqa_cooking_isanswerable": 40.0,
+ "predict_f1_for_task1442_doqa_movies_isanswerable": 80.0,
+ "predict_f1_for_task1516_imppres_naturallanguageinference": 20.0,
+ "predict_f1_for_task1529_scitail1.1_classification": 40.0,
+ "predict_f1_for_task1531_daily_dialog_type_classification": 40.0,
+ "predict_f1_for_task1533_daily_dialog_formal_classification": 60.0,
+ "predict_f1_for_task1534_daily_dialog_question_classification": 40.0,
+ "predict_f1_for_task1540_parsed_pdfs_summarization": 25.1948,
+ "predict_f1_for_task1554_scitail_classification": 40.0,
+ "predict_f1_for_task1557_jfleg_answer_generation": 71.7726,
+ "predict_f1_for_task1562_zest_text_modification": 45.6913,
+ "predict_f1_for_task1586_scifact_title_generation": 16.1509,
+ "predict_f1_for_task1598_nyc_long_text_generation": 27.3109,
+ "predict_f1_for_task1612_sick_label_classification": 40.0,
+ "predict_f1_for_task1615_sick_tclassify_b_relation_a": 60.0,
+ "predict_f1_for_task1622_disfl_qa_text_modication": 64.3711,
+ "predict_f1_for_task1624_disfl_qa_question_yesno_classification": 60.0,
+ "predict_f1_for_task1631_openpi_answer_generation": 77.0305,
+ "predict_f1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
+ "predict_f1_for_task1659_title_generation": 25.0379,
+ "predict_f1_for_task1664_winobias_text_generation": 80.0,
+ "predict_f1_for_task1728_web_nlg_data_to_text": 42.0333,
+ "predict_f1_for_task190_snli_classification": 40.0,
+ "predict_f1_for_task199_mnli_classification": 100.0,
+ "predict_f1_for_task200_mnli_entailment_classification": 80.0,
+ "predict_f1_for_task201_mnli_neutral_classification": 0.0,
+ "predict_f1_for_task202_mnli_contradiction_classification": 80.0,
+ "predict_f1_for_task219_rocstories_title_answer_generation": 15.7143,
+ "predict_f1_for_task220_rocstories_title_classification": 100.0,
+ "predict_f1_for_task226_english_language_answer_relevance_classification": 40.0,
+ "predict_f1_for_task232_iirc_link_number_classification": 60.0,
+ "predict_f1_for_task233_iirc_link_exists_classification": 40.0,
+ "predict_f1_for_task242_tweetqa_classification": 80.0,
+ "predict_f1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333,
+ "predict_f1_for_task281_points_of_correspondence": 27.6283,
+ "predict_f1_for_task288_gigaword_summarization": 10.3571,
+ "predict_f1_for_task290_tellmewhy_question_answerability": 86.6667,
+ "predict_f1_for_task304_numeric_fused_head_resolution": 13.3333,
+ "predict_f1_for_task329_gap_classification": 60.0,
+ "predict_f1_for_task330_gap_answer_generation": 96.0,
+ "predict_f1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
+ "predict_f1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0,
+ "predict_f1_for_task391_causal_relationship": 80.0,
+ "predict_f1_for_task392_inverse_causal_relationship": 93.3333,
+ "predict_f1_for_task393_plausible_result_generation": 38.1587,
+ "predict_f1_for_task401_numeric_fused_head_reference": 43.3333,
+ "predict_f1_for_task402_grailqa_paraphrase_generation": 58.1832,
+ "predict_f1_for_task418_persent_title_generation": 15.3458,
+ "predict_f1_for_task442_com_qa_paraphrase_question_generation": 64.9744,
+ "predict_f1_for_task500_scruples_anecdotes_title_generation": 3.0769,
+ "predict_f1_for_task510_reddit_tifu_title_summarization": 38.265,
+ "predict_f1_for_task520_aquamuse_answer_given_in_passage": 100.0,
+ "predict_f1_for_task569_recipe_nlg_text_generation": 20.6061,
+ "predict_f1_for_task602_wikitext-103_answer_generation": 0.0,
+ "predict_f1_for_task613_politifact_text_generation": 13.3333,
+ "predict_f1_for_task614_glucose_cause_event_detection": 22.7647,
+ "predict_f1_for_task619_ohsumed_abstract_title_generation": 34.2082,
+ "predict_f1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333,
+ "predict_f1_for_task623_ohsumed_yes_no_answer_generation": 80.0,
+ "predict_f1_for_task640_esnli_classification": 40.0,
+ "predict_f1_for_task641_esnli_classification": 60.0,
+ "predict_f1_for_task642_esnli_classification": 40.0,
+ "predict_f1_for_task645_summarization": 100.0,
+ "predict_f1_for_task648_answer_generation": 13.3333,
+ "predict_f1_for_task670_ambigqa_question_generation": 75.6778,
+ "predict_f1_for_task671_ambigqa_text_generation": 62.4536,
+ "predict_f1_for_task677_ollie_sentence_answer_generation": 27.0794,
+ "predict_f1_for_task738_perspectrum_classification": 60.0,
+ "predict_f1_for_task743_eurlex_summarization": 27.1719,
+ "predict_f1_for_task760_msr_sqa_long_text_generation": 0.7946,
+ "predict_f1_for_task769_qed_summarization": 100.0,
+ "predict_f1_for_task827_copa_commonsense_reasoning": 80.0,
+ "predict_f1_for_task828_copa_commonsense_cause_effect": 60.0,
+ "predict_f1_for_task879_schema_guided_dstc8_classification": 100.0,
+ "predict_f1_for_task880_schema_guided_dstc8_classification": 20.0,
+ "predict_f1_for_task890_gcwd_classification": 40.0,
+ "predict_f1_for_task891_gap_coreference_resolution": 50.0,
+ "predict_f1_for_task892_gap_reverse_coreference_resolution": 40.0,
+ "predict_f1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
+ "predict_f1_for_task935_defeasible_nli_atomic_classification": 40.0,
+ "predict_f1_for_task936_defeasible_nli_snli_classification": 40.0,
+ "predict_f1_for_task937_defeasible_nli_social_classification": 20.0,
+ "predict_f1_for_task957_e2e_nlg_text_generation_generate": 52.8436,
+ "predict_f1_for_task970_sherliic_causal_relationship": 100.0,
+ "predict_f1_for_textual_entailment": 40.8333,
+ "predict_f1_for_title_generation": 27.9279,
+ "predict_f1_for_word_analogy": 47.5,
+ "predict_gen_len": 4.8739,
+ "predict_global_step": 17980,
+ "predict_loss": 1.446231484413147,
+ "predict_rouge1": 51.3811,
+ "predict_rouge1_for_answerability_classification": 69.7436,
+ "predict_rouge1_for_cause_effect_classification": 67.4521,
+ "predict_rouge1_for_coreference_resolution": 56.9388,
+ "predict_rouge1_for_data_to_text": 39.3261,
+ "predict_rouge1_for_dialogue_act_recognition": 61.9048,
+ "predict_rouge1_for_grammar_error_correction": 73.7928,
+ "predict_rouge1_for_keyword_tagging": 64.1333,
+ "predict_rouge1_for_overlap_extraction": 23.0406,
+ "predict_rouge1_for_question_rewriting": 57.6828,
+ "predict_rouge1_for_task020_mctaco_span_based_question": 40.0,
+ "predict_rouge1_for_task033_winogrande_answer_generation": 66.6667,
+ "predict_rouge1_for_task034_winogrande_question_modification_object": 61.7974,
+ "predict_rouge1_for_task035_winogrande_question_modification_person": 58.9922,
+ "predict_rouge1_for_task036_qasc_topic_word_to_generate_related_fact": 66.0,
+ "predict_rouge1_for_task039_qasc_find_overlapping_words": 20.0,
+ "predict_rouge1_for_task050_multirc_answerability": 100.0,
+ "predict_rouge1_for_task102_commongen_sentence_generation": 60.8462,
+ "predict_rouge1_for_task1152_bard_analogical_reasoning_causation": 20.0,
+ "predict_rouge1_for_task1153_bard_analogical_reasoning_affordance": 20.0,
+ "predict_rouge1_for_task1154_bard_analogical_reasoning_travel": 40.0,
+ "predict_rouge1_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
+ "predict_rouge1_for_task1156_bard_analogical_reasoning_tools": 40.0,
+ "predict_rouge1_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
+ "predict_rouge1_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
+ "predict_rouge1_for_task1159_bard_analogical_reasoning_containers": 60.0,
+ "predict_rouge1_for_task1161_coda19_title_generation": 25.5618,
+ "predict_rouge1_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873,
+ "predict_rouge1_for_task121_zest_text_modification": 47.5745,
+ "predict_rouge1_for_task133_winowhy_reason_plausibility_detection": 20.0,
+ "predict_rouge1_for_task1342_amazon_us_reviews_title": 8.9697,
+ "predict_rouge1_for_task1344_glue_entailment_classification": 40.0,
+ "predict_rouge1_for_task1345_glue_qqp_question_paraprashing": 21.3333,
+ "predict_rouge1_for_task1356_xlsum_title_generation": 18.6667,
+ "predict_rouge1_for_task1358_xlsum_title_generation": 26.563,
+ "predict_rouge1_for_task1385_anli_r1_entailment": 20.0,
+ "predict_rouge1_for_task1386_anli_r2_entailment": 40.0,
+ "predict_rouge1_for_task1387_anli_r3_entailment": 20.0,
+ "predict_rouge1_for_task1388_cb_entailment": 40.0,
+ "predict_rouge1_for_task1390_wscfixed_coreference": 60.0,
+ "predict_rouge1_for_task1391_winogrande_easy_answer_generation": 100.0,
+ "predict_rouge1_for_task1393_superglue_copa_text_completion": 100.0,
+ "predict_rouge1_for_task1394_meta_woz_task_classification": 93.3333,
+ "predict_rouge1_for_task1407_dart_question_generation": 24.086,
+ "predict_rouge1_for_task1409_dart_text_generation": 41.9306,
+ "predict_rouge1_for_task1439_doqa_cooking_isanswerable": 40.0,
+ "predict_rouge1_for_task1442_doqa_movies_isanswerable": 80.0,
+ "predict_rouge1_for_task1516_imppres_naturallanguageinference": 20.0,
+ "predict_rouge1_for_task1529_scitail1.1_classification": 40.0,
+ "predict_rouge1_for_task1531_daily_dialog_type_classification": 40.0,
+ "predict_rouge1_for_task1533_daily_dialog_formal_classification": 60.0,
+ "predict_rouge1_for_task1534_daily_dialog_question_classification": 40.0,
+ "predict_rouge1_for_task1540_parsed_pdfs_summarization": 28.0519,
+ "predict_rouge1_for_task1554_scitail_classification": 40.0,
+ "predict_rouge1_for_task1557_jfleg_answer_generation": 73.7928,
+ "predict_rouge1_for_task1562_zest_text_modification": 47.5095,
+ "predict_rouge1_for_task1586_scifact_title_generation": 17.3747,
+ "predict_rouge1_for_task1598_nyc_long_text_generation": 27.0,
+ "predict_rouge1_for_task1612_sick_label_classification": 40.0,
+ "predict_rouge1_for_task1615_sick_tclassify_b_relation_a": 86.6667,
+ "predict_rouge1_for_task1622_disfl_qa_text_modication": 66.5018,
+ "predict_rouge1_for_task1624_disfl_qa_question_yesno_classification": 60.0,
+ "predict_rouge1_for_task1631_openpi_answer_generation": 77.0305,
+ "predict_rouge1_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
+ "predict_rouge1_for_task1659_title_generation": 27.5046,
+ "predict_rouge1_for_task1664_winobias_text_generation": 80.0,
+ "predict_rouge1_for_task1728_web_nlg_data_to_text": 41.1615,
+ "predict_rouge1_for_task190_snli_classification": 40.0,
+ "predict_rouge1_for_task199_mnli_classification": 100.0,
+ "predict_rouge1_for_task200_mnli_entailment_classification": 80.0,
+ "predict_rouge1_for_task201_mnli_neutral_classification": 0.0,
+ "predict_rouge1_for_task202_mnli_contradiction_classification": 80.0,
+ "predict_rouge1_for_task219_rocstories_title_answer_generation": 20.7143,
+ "predict_rouge1_for_task220_rocstories_title_classification": 100.0,
+ "predict_rouge1_for_task226_english_language_answer_relevance_classification": 40.0,
+ "predict_rouge1_for_task232_iirc_link_number_classification": 60.0,
+ "predict_rouge1_for_task233_iirc_link_exists_classification": 40.0,
+ "predict_rouge1_for_task242_tweetqa_classification": 80.0,
+ "predict_rouge1_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333,
+ "predict_rouge1_for_task281_points_of_correspondence": 26.0813,
+ "predict_rouge1_for_task288_gigaword_summarization": 23.8571,
+ "predict_rouge1_for_task290_tellmewhy_question_answerability": 86.6667,
+ "predict_rouge1_for_task304_numeric_fused_head_resolution": 13.3333,
+ "predict_rouge1_for_task329_gap_classification": 60.0,
+ "predict_rouge1_for_task330_gap_answer_generation": 97.1429,
+ "predict_rouge1_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
+ "predict_rouge1_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0,
+ "predict_rouge1_for_task391_causal_relationship": 80.0,
+ "predict_rouge1_for_task392_inverse_causal_relationship": 93.3333,
+ "predict_rouge1_for_task393_plausible_result_generation": 38.1587,
+ "predict_rouge1_for_task401_numeric_fused_head_reference": 43.3333,
+ "predict_rouge1_for_task402_grailqa_paraphrase_generation": 60.8498,
+ "predict_rouge1_for_task418_persent_title_generation": 15.3458,
+ "predict_rouge1_for_task442_com_qa_paraphrase_question_generation": 66.8333,
+ "predict_rouge1_for_task500_scruples_anecdotes_title_generation": 2.8571,
+ "predict_rouge1_for_task510_reddit_tifu_title_summarization": 39.811,
+ "predict_rouge1_for_task520_aquamuse_answer_given_in_passage": 100.0,
+ "predict_rouge1_for_task569_recipe_nlg_text_generation": 24.2424,
+ "predict_rouge1_for_task602_wikitext-103_answer_generation": 0.0,
+ "predict_rouge1_for_task613_politifact_text_generation": 33.3333,
+ "predict_rouge1_for_task614_glucose_cause_event_detection": 20.673,
+ "predict_rouge1_for_task619_ohsumed_abstract_title_generation": 38.4305,
+ "predict_rouge1_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333,
+ "predict_rouge1_for_task623_ohsumed_yes_no_answer_generation": 80.0,
+ "predict_rouge1_for_task640_esnli_classification": 40.0,
+ "predict_rouge1_for_task641_esnli_classification": 60.0,
+ "predict_rouge1_for_task642_esnli_classification": 40.0,
+ "predict_rouge1_for_task645_summarization": 100.0,
+ "predict_rouge1_for_task648_answer_generation": 13.3333,
+ "predict_rouge1_for_task670_ambigqa_question_generation": 75.6778,
+ "predict_rouge1_for_task671_ambigqa_text_generation": 62.4536,
+ "predict_rouge1_for_task677_ollie_sentence_answer_generation": 28.5268,
+ "predict_rouge1_for_task738_perspectrum_classification": 60.0,
+ "predict_rouge1_for_task743_eurlex_summarization": 31.4423,
+ "predict_rouge1_for_task760_msr_sqa_long_text_generation": 0.8103,
+ "predict_rouge1_for_task769_qed_summarization": 100.0,
+ "predict_rouge1_for_task827_copa_commonsense_reasoning": 80.0,
+ "predict_rouge1_for_task828_copa_commonsense_cause_effect": 60.0,
+ "predict_rouge1_for_task879_schema_guided_dstc8_classification": 100.0,
+ "predict_rouge1_for_task880_schema_guided_dstc8_classification": 60.0,
+ "predict_rouge1_for_task890_gcwd_classification": 40.0,
+ "predict_rouge1_for_task891_gap_coreference_resolution": 50.0,
+ "predict_rouge1_for_task892_gap_reverse_coreference_resolution": 40.0,
+ "predict_rouge1_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
+ "predict_rouge1_for_task935_defeasible_nli_atomic_classification": 60.0,
+ "predict_rouge1_for_task936_defeasible_nli_snli_classification": 40.0,
+ "predict_rouge1_for_task937_defeasible_nli_social_classification": 20.0,
+ "predict_rouge1_for_task957_e2e_nlg_text_generation_generate": 52.5426,
+ "predict_rouge1_for_task970_sherliic_causal_relationship": 100.0,
+ "predict_rouge1_for_textual_entailment": 47.7778,
+ "predict_rouge1_for_title_generation": 30.5218,
+ "predict_rouge1_for_word_analogy": 47.5,
+ "predict_rougeL": 50.4639,
+ "predict_rougeL_for_answerability_classification": 69.7436,
+ "predict_rougeL_for_cause_effect_classification": 67.4521,
+ "predict_rougeL_for_coreference_resolution": 56.9388,
+ "predict_rougeL_for_data_to_text": 34.4403,
+ "predict_rougeL_for_dialogue_act_recognition": 61.9048,
+ "predict_rougeL_for_grammar_error_correction": 73.7928,
+ "predict_rougeL_for_keyword_tagging": 64.1333,
+ "predict_rougeL_for_overlap_extraction": 22.624,
+ "predict_rougeL_for_question_rewriting": 54.491,
+ "predict_rougeL_for_task020_mctaco_span_based_question": 40.0,
+ "predict_rougeL_for_task033_winogrande_answer_generation": 66.6667,
+ "predict_rougeL_for_task034_winogrande_question_modification_object": 61.7974,
+ "predict_rougeL_for_task035_winogrande_question_modification_person": 58.9922,
+ "predict_rougeL_for_task036_qasc_topic_word_to_generate_related_fact": 66.0,
+ "predict_rougeL_for_task039_qasc_find_overlapping_words": 20.0,
+ "predict_rougeL_for_task050_multirc_answerability": 100.0,
+ "predict_rougeL_for_task102_commongen_sentence_generation": 47.6154,
+ "predict_rougeL_for_task1152_bard_analogical_reasoning_causation": 20.0,
+ "predict_rougeL_for_task1153_bard_analogical_reasoning_affordance": 20.0,
+ "predict_rougeL_for_task1154_bard_analogical_reasoning_travel": 40.0,
+ "predict_rougeL_for_task1155_bard_analogical_reasoning_trash_or_treasure": 100.0,
+ "predict_rougeL_for_task1156_bard_analogical_reasoning_tools": 40.0,
+ "predict_rougeL_for_task1157_bard_analogical_reasoning_rooms_for_containers": 80.0,
+ "predict_rougeL_for_task1158_bard_analogical_reasoning_manipulating_items": 20.0,
+ "predict_rougeL_for_task1159_bard_analogical_reasoning_containers": 60.0,
+ "predict_rougeL_for_task1161_coda19_title_generation": 22.8951,
+ "predict_rougeL_for_task1195_disflqa_disfluent_to_fluent_conversion": 64.9873,
+ "predict_rougeL_for_task121_zest_text_modification": 41.2253,
+ "predict_rougeL_for_task133_winowhy_reason_plausibility_detection": 20.0,
+ "predict_rougeL_for_task1342_amazon_us_reviews_title": 6.303,
+ "predict_rougeL_for_task1344_glue_entailment_classification": 40.0,
+ "predict_rougeL_for_task1345_glue_qqp_question_paraprashing": 21.3333,
+ "predict_rougeL_for_task1356_xlsum_title_generation": 16.0,
+ "predict_rougeL_for_task1358_xlsum_title_generation": 24.2101,
+ "predict_rougeL_for_task1385_anli_r1_entailment": 20.0,
+ "predict_rougeL_for_task1386_anli_r2_entailment": 40.0,
+ "predict_rougeL_for_task1387_anli_r3_entailment": 20.0,
+ "predict_rougeL_for_task1388_cb_entailment": 40.0,
+ "predict_rougeL_for_task1390_wscfixed_coreference": 60.0,
+ "predict_rougeL_for_task1391_winogrande_easy_answer_generation": 100.0,
+ "predict_rougeL_for_task1393_superglue_copa_text_completion": 100.0,
+ "predict_rougeL_for_task1394_meta_woz_task_classification": 93.3333,
+ "predict_rougeL_for_task1407_dart_question_generation": 20.8043,
+ "predict_rougeL_for_task1409_dart_text_generation": 36.5007,
+ "predict_rougeL_for_task1439_doqa_cooking_isanswerable": 40.0,
+ "predict_rougeL_for_task1442_doqa_movies_isanswerable": 80.0,
+ "predict_rougeL_for_task1516_imppres_naturallanguageinference": 20.0,
+ "predict_rougeL_for_task1529_scitail1.1_classification": 40.0,
+ "predict_rougeL_for_task1531_daily_dialog_type_classification": 40.0,
+ "predict_rougeL_for_task1533_daily_dialog_formal_classification": 60.0,
+ "predict_rougeL_for_task1534_daily_dialog_question_classification": 40.0,
+ "predict_rougeL_for_task1540_parsed_pdfs_summarization": 28.0519,
+ "predict_rougeL_for_task1554_scitail_classification": 40.0,
+ "predict_rougeL_for_task1557_jfleg_answer_generation": 73.7928,
+ "predict_rougeL_for_task1562_zest_text_modification": 38.5095,
+ "predict_rougeL_for_task1586_scifact_title_generation": 17.3747,
+ "predict_rougeL_for_task1598_nyc_long_text_generation": 25.8889,
+ "predict_rougeL_for_task1612_sick_label_classification": 40.0,
+ "predict_rougeL_for_task1615_sick_tclassify_b_relation_a": 86.6667,
+ "predict_rougeL_for_task1622_disfl_qa_text_modication": 57.2711,
+ "predict_rougeL_for_task1624_disfl_qa_question_yesno_classification": 60.0,
+ "predict_rougeL_for_task1631_openpi_answer_generation": 74.5305,
+ "predict_rougeL_for_task1640_aqa1.0_answerable_unanswerable_question_classification": 100.0,
+ "predict_rougeL_for_task1659_title_generation": 22.3799,
+ "predict_rougeL_for_task1664_winobias_text_generation": 80.0,
+ "predict_rougeL_for_task1728_web_nlg_data_to_text": 32.2577,
+ "predict_rougeL_for_task190_snli_classification": 40.0,
+ "predict_rougeL_for_task199_mnli_classification": 100.0,
+ "predict_rougeL_for_task200_mnli_entailment_classification": 80.0,
+ "predict_rougeL_for_task201_mnli_neutral_classification": 0.0,
+ "predict_rougeL_for_task202_mnli_contradiction_classification": 80.0,
+ "predict_rougeL_for_task219_rocstories_title_answer_generation": 20.7143,
+ "predict_rougeL_for_task220_rocstories_title_classification": 100.0,
+ "predict_rougeL_for_task226_english_language_answer_relevance_classification": 40.0,
+ "predict_rougeL_for_task232_iirc_link_number_classification": 60.0,
+ "predict_rougeL_for_task233_iirc_link_exists_classification": 40.0,
+ "predict_rougeL_for_task242_tweetqa_classification": 80.0,
+ "predict_rougeL_for_task249_enhanced_wsc_pronoun_disambiguation": 53.3333,
+ "predict_rougeL_for_task281_points_of_correspondence": 25.248,
+ "predict_rougeL_for_task288_gigaword_summarization": 23.8571,
+ "predict_rougeL_for_task290_tellmewhy_question_answerability": 86.6667,
+ "predict_rougeL_for_task304_numeric_fused_head_resolution": 13.3333,
+ "predict_rougeL_for_task329_gap_classification": 60.0,
+ "predict_rougeL_for_task330_gap_answer_generation": 97.1429,
+ "predict_rougeL_for_task349_squad2.0_answerable_unanswerable_question_classification": 80.0,
+ "predict_rougeL_for_task362_spolin_yesand_prompt_response_sub_classification": 40.0,
+ "predict_rougeL_for_task391_causal_relationship": 80.0,
+ "predict_rougeL_for_task392_inverse_causal_relationship": 93.3333,
+ "predict_rougeL_for_task393_plausible_result_generation": 38.1587,
+ "predict_rougeL_for_task401_numeric_fused_head_reference": 43.3333,
+ "predict_rougeL_for_task402_grailqa_paraphrase_generation": 55.8974,
+ "predict_rougeL_for_task418_persent_title_generation": 10.6399,
+ "predict_rougeL_for_task442_com_qa_paraphrase_question_generation": 64.3333,
+ "predict_rougeL_for_task500_scruples_anecdotes_title_generation": 2.8571,
+ "predict_rougeL_for_task510_reddit_tifu_title_summarization": 39.811,
+ "predict_rougeL_for_task520_aquamuse_answer_given_in_passage": 100.0,
+ "predict_rougeL_for_task569_recipe_nlg_text_generation": 20.6061,
+ "predict_rougeL_for_task602_wikitext-103_answer_generation": 0.0,
+ "predict_rougeL_for_task613_politifact_text_generation": 33.3333,
+ "predict_rougeL_for_task614_glucose_cause_event_detection": 20.673,
+ "predict_rougeL_for_task619_ohsumed_abstract_title_generation": 38.4305,
+ "predict_rougeL_for_task620_ohsumed_medical_subject_headings_answer_generation": 41.3333,
+ "predict_rougeL_for_task623_ohsumed_yes_no_answer_generation": 80.0,
+ "predict_rougeL_for_task640_esnli_classification": 40.0,
+ "predict_rougeL_for_task641_esnli_classification": 60.0,
+ "predict_rougeL_for_task642_esnli_classification": 40.0,
+ "predict_rougeL_for_task645_summarization": 100.0,
+ "predict_rougeL_for_task648_answer_generation": 13.3333,
+ "predict_rougeL_for_task670_ambigqa_question_generation": 75.6778,
+ "predict_rougeL_for_task671_ambigqa_text_generation": 59.3766,
+ "predict_rougeL_for_task677_ollie_sentence_answer_generation": 28.5268,
+ "predict_rougeL_for_task738_perspectrum_classification": 60.0,
+ "predict_rougeL_for_task743_eurlex_summarization": 26.0256,
+ "predict_rougeL_for_task760_msr_sqa_long_text_generation": 0.8103,
+ "predict_rougeL_for_task769_qed_summarization": 100.0,
+ "predict_rougeL_for_task827_copa_commonsense_reasoning": 80.0,
+ "predict_rougeL_for_task828_copa_commonsense_cause_effect": 60.0,
+ "predict_rougeL_for_task879_schema_guided_dstc8_classification": 100.0,
+ "predict_rougeL_for_task880_schema_guided_dstc8_classification": 60.0,
+ "predict_rougeL_for_task890_gcwd_classification": 40.0,
+ "predict_rougeL_for_task891_gap_coreference_resolution": 50.0,
+ "predict_rougeL_for_task892_gap_reverse_coreference_resolution": 40.0,
+ "predict_rougeL_for_task893_gap_fill_the_blank_coreference_resolution": 100.0,
+ "predict_rougeL_for_task935_defeasible_nli_atomic_classification": 60.0,
+ "predict_rougeL_for_task936_defeasible_nli_snli_classification": 40.0,
+ "predict_rougeL_for_task937_defeasible_nli_social_classification": 20.0,
+ "predict_rougeL_for_task957_e2e_nlg_text_generation_generate": 43.0283,
+ "predict_rougeL_for_task970_sherliic_causal_relationship": 100.0,
+ "predict_rougeL_for_textual_entailment": 47.7778,
+ "predict_rougeL_for_title_generation": 28.8976,
+ "predict_rougeL_for_word_analogy": 47.5,
+ "predict_runtime": 99.8176,
+ "predict_samples": 595,
+ "predict_samples_per_second": 5.961,
+ "predict_steps_per_second": 0.1
+ }
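
For quick inspection of the file above, here is a minimal sketch (plain Python, standard library only) that averages the per-task exact-match entries; it assumes the file has been downloaded locally as predict_results.json:

```python
import json

# Load the prediction metrics from this commit (local copy assumed).
with open("predict_results.json") as f:
    metrics = json.load(f)

# Keep only the per-task exact-match scores, skipping the
# category-level aggregates (whose names don't contain "for_task").
task_scores = {
    name: value
    for name, value in metrics.items()
    if name.startswith("predict_exact_match_for_task")
}

mean = sum(task_scores.values()) / len(task_scores)
print(f"{len(task_scores)} tasks, mean exact match = {mean:.2f}")
```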
predicted_examples.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc4a7f7e6cc320120fcb2b52896260d9bb0b340ee4692b8c0819abf9279dd7ec
+ size 44540599945
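
The three lines above are a Git LFS pointer, not the weights themselves: `version` names the pointer spec, `oid` is the SHA-256 of the actual file, and `size` is its length in bytes (~44.5 GB, which is consistent with an ~11B-parameter T5-XXL checkpoint stored in 32-bit floats). A minimal parsing sketch, assuming a local file that contains just the pointer text:

```python
# Sketch: parse a Git LFS pointer file like the one above.
# Assumes "pytorch_model.bin" locally holds the three pointer lines.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer("pytorch_model.bin")
print(pointer["oid"])        # sha256:fc4a7f7e...
print(int(pointer["size"]))  # 44540599945 bytes (~44.5 GB)
```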
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"]}
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
+ size 791656
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 100, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"], "sp_model_kwargs": {}, "model_max_length": 512, "name_or_path": "google/t5-xxl-lm-adapt", "special_tokens_map_file": "/home/patrick/.cache/huggingface/transformers/5995b096c7f019eb696851d1af1c82271c3083fac6538e159d9b07ed718214d0.c94798918c92ded6aeef2d2f0e666d2cc4145eca1aa6e1336fde07f2e13e2f46", "tokenizer_class": "T5Tokenizer"}
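
Given the config above (`tokenizer_class: T5Tokenizer`, `model_max_length: 512`, derived from `google/t5-xxl-lm-adapt`), the tokenizer can be loaded with Hugging Face transformers. A minimal sketch, assuming the repo files have been cloned into the current directory (the hub repo id would work equally well):

```python
from transformers import AutoTokenizer

# Load the tokenizer shipped in this commit from a local clone.
tok = AutoTokenizer.from_pretrained(".")
print(tok.eos_token, tok.pad_token, tok.model_max_length)  # </s> <pad> 512

# Round-trip a short string through the tokenizer.
ids = tok("Definition: answer the question.").input_ids
print(len(ids), tok.decode(ids))
```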
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 2.0,
+ "train_loss": 0.7423103578629032,
+ "train_runtime": 191097.123,
+ "train_samples": 71917,
+ "train_samples_per_second": 0.753,
+ "train_steps_per_second": 0.094
+ }
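
The throughput figures above are mutually consistent: 71,917 samples x 2 epochs = 143,834 samples processed in 191,097 s gives ~0.753 samples/s, and the 17,980 optimizer steps recorded in trainer_state.json below give ~0.094 steps/s over the same runtime, implying an effective batch size of about 8 (143,834 / 17,980). A quick check in Python:

```python
# Consistency check of the training throughput numbers
# (global_step taken from trainer_state.json below).
train_samples, epochs = 71917, 2
train_runtime = 191097.123  # seconds
global_step = 17980

print(train_samples * epochs / train_runtime)  # ~0.753 samples/s
print(global_step / train_runtime)             # ~0.094 steps/s
print(train_samples * epochs / global_step)    # effective batch size ~8
```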
trainer_state.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.0,
+ "global_step": 17980,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 2.0,
+ "step": 17980,
+ "total_flos": 149889593901056.0,
+ "train_loss": 0.7423103578629032,
+ "train_runtime": 191097.123,
+ "train_samples_per_second": 0.753,
+ "train_steps_per_second": 0.094
+ }
+ ],
+ "max_steps": 17980,
+ "num_train_epochs": 2,
+ "total_flos": 149889593901056.0,
+ "trial_name": null,
+ "trial_params": null
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31bafcd1030465d4d69494bd6f53e715ab1a4ca11f5e999f908b963bf8b60024
+ size 4527