instructNER_ontonotes5_xl / test_results.json
{
"epoch": 3.0,
"test_CARDINAL": {
"f1": 0.8551502145922747,
"number": 935,
"precision": 0.8579117330462863,
"recall": 0.8524064171122995
},
"test_DATE": {
"f1": 0.8761552680221812,
"number": 1599,
"precision": 0.8633879781420765,
"recall": 0.8893058161350844
},
"test_EVENT": {
"f1": 0.6825396825396826,
"number": 62,
"precision": 0.671875,
"recall": 0.6935483870967742
},
"test_FAC": {
"f1": 0.7517730496453903,
"number": 135,
"precision": 0.7210884353741497,
"recall": 0.7851851851851852
},
"test_GPE": {
"f1": 0.9686098654708519,
"number": 2239,
"precision": 0.9725348941918055,
"recall": 0.964716391246092
},
"test_LANGUAGE": {
"f1": 0.7222222222222223,
"number": 22,
"precision": 0.9285714285714286,
"recall": 0.5909090909090909
},
"test_LAW": {
"f1": 0.7605633802816901,
"number": 37,
"precision": 0.7941176470588235,
"recall": 0.7297297297297297
},
"test_LOC": {
"f1": 0.7859078590785907,
"number": 179,
"precision": 0.7631578947368421,
"recall": 0.8100558659217877
},
"test_MONEY": {
"f1": 0.8899521531100479,
"number": 314,
"precision": 0.8913738019169329,
"recall": 0.8885350318471338
},
"test_NORP": {
"f1": 0.947429906542056,
"number": 841,
"precision": 0.931113662456946,
"recall": 0.9643281807372176
},
"test_ORDINAL": {
"f1": 0.8722891566265061,
"number": 195,
"precision": 0.8227272727272728,
"recall": 0.9282051282051282
},
"test_ORG": {
"f1": 0.9144625773776026,
"number": 1791,
"precision": 0.921724333522405,
"recall": 0.9073143495254048
},
"test_PERCENT": {
"f1": 0.9171428571428571,
"number": 349,
"precision": 0.9145299145299145,
"recall": 0.9197707736389685
},
"test_PERSON": {
"f1": 0.9640432486799095,
"number": 1988,
"precision": 0.9638009049773756,
"recall": 0.9642857142857143
},
"test_PRODUCT": {
"f1": 0.7349397590361447,
"number": 76,
"precision": 0.6777777777777778,
"recall": 0.8026315789473685
},
"test_QUANTITY": {
"f1": 0.7924528301886793,
"number": 105,
"precision": 0.7850467289719626,
"recall": 0.8
},
"test_TIME": {
"f1": 0.6761904761904762,
"number": 211,
"precision": 0.6794258373205742,
"recall": 0.6729857819905213
},
"test_WORK_OF_ART": {
"f1": 0.65015479876161,
"number": 163,
"precision": 0.65625,
"recall": 0.6441717791411042
},
"test_loss": 0.00661951769143343,
"test_overall_accuracy": 0.982111989942905,
"test_overall_f1": 0.9077127659574469,
"test_overall_precision": 0.9045852107076597,
"test_overall_recall": 0.9108620229516947,
"test_runtime": 34.2561,
"test_samples_per_second": 277.382,
"test_steps_per_second": 8.67
}
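
A minimal sketch (not part of the results file itself) of how these metrics could be loaded and summarized, assuming the file is saved locally as test_results.json. It relies only on the structure visible above: per-entity keys of the form "test_<LABEL>" mapping to precision/recall/f1/number dicts, plus flat overall metrics. Requires Python 3.9+ for str.removeprefix.

import json

with open("test_results.json") as f:
    results = json.load(f)

# Per-entity blocks are the dict-valued entries; everything else
# (loss, overall metrics, runtime stats) is a flat scalar.
entities = {
    key.removeprefix("test_"): metrics
    for key, metrics in results.items()
    if isinstance(metrics, dict)
}

# Print a table sorted by F1, best-performing entity types first.
print(f"{'entity':<14}{'number':>8}{'precision':>11}{'recall':>9}{'f1':>8}")
for name, m in sorted(entities.items(), key=lambda kv: -kv[1]["f1"]):
    print(f"{name:<14}{m['number']:>8}{m['precision']:>11.4f}"
          f"{m['recall']:>9.4f}{m['f1']:>8.4f}")

print(f"\noverall f1: {results['test_overall_f1']:.4f} "
      f"(accuracy {results['test_overall_accuracy']:.4f})")

Running this against the file above would show, for example, that GPE (F1 ≈ 0.969) and PERSON (F1 ≈ 0.964) are the strongest entity types, while WORK_OF_ART (F1 ≈ 0.650) and TIME (F1 ≈ 0.676) lag behind, which is consistent with their smaller support counts.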