{
"epoch": 200.0,
"eval_accuracy": 76.72413793103449,
"eval_average_metrics": 70.1131066055635,
"eval_classification_report": "{\"accusation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8333333333333334, \"f1-score\": 0.7407407407407408, \"support\": 12.0}, \"appreciation\": {\"precision\": 1.0, \"recall\": 0.75, \"f1-score\": 0.8571428571428571, \"support\": 8.0}, \"challenge\": {\"precision\": 0.7931034482758621, \"recall\": 0.7419354838709677, \"f1-score\": 0.7666666666666667, \"support\": 31.0}, \"evaluation\": {\"precision\": 1.0, \"recall\": 0.7777777777777778, \"f1-score\": 0.8750000000000001, \"support\": 9.0}, \"informing statement\": {\"precision\": 0.7297297297297297, \"recall\": 0.7297297297297297, \"f1-score\": 0.7297297297297297, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.7777777777777778, \"recall\": 0.7777777777777778, \"f1-score\": 0.7777777777777778, \"support\": 9.0}, \"request\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"accuracy\": 0.7672413793103449, \"macro avg\": {\"precision\": 0.8292430361395879, \"recall\": 0.8124303739223094, \"f1-score\": 0.8136203167453168, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7809155766944115, \"recall\": 0.7672413793103449, \"f1-score\": 0.7693520799124248, \"support\": 116.0}}",
"eval_f1_macro": 50.06894256894257,
"eval_f1_micro": 76.72413793103448,
"eval_f1_weighted": 76.93520799124248,
"eval_loss": 0.9065346121788025,
"eval_runtime": 1.1668,
"eval_samples_per_second": 99.413,
"init_mem_cpu_alloc_delta": -542769152,
"init_mem_cpu_peaked_delta": 542777344,
"init_mem_gpu_alloc_delta": 891528192,
"init_mem_gpu_peaked_delta": 0,
"peak_memory": 5.305544921875,
"test_accuracy": 76.72413793103449,
"test_average_metrics": 70.1131066055635,
"test_classification_report": "{\"accusation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8333333333333334, \"f1-score\": 0.7407407407407408, \"support\": 12.0}, \"appreciation\": {\"precision\": 1.0, \"recall\": 0.75, \"f1-score\": 0.8571428571428571, \"support\": 8.0}, \"challenge\": {\"precision\": 0.7931034482758621, \"recall\": 0.7419354838709677, \"f1-score\": 0.7666666666666667, \"support\": 31.0}, \"evaluation\": {\"precision\": 1.0, \"recall\": 0.7777777777777778, \"f1-score\": 0.8750000000000001, \"support\": 9.0}, \"informing statement\": {\"precision\": 0.7297297297297297, \"recall\": 0.7297297297297297, \"f1-score\": 0.7297297297297297, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.7777777777777778, \"recall\": 0.7777777777777778, \"f1-score\": 0.7777777777777778, \"support\": 9.0}, \"request\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"accuracy\": 0.7672413793103449, \"macro avg\": {\"precision\": 0.8292430361395879, \"recall\": 0.8124303739223094, \"f1-score\": 0.8136203167453168, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7809155766944115, \"recall\": 0.7672413793103449, \"f1-score\": 0.7693520799124248, \"support\": 116.0}}",
"test_f1_macro": 50.06894256894257,
"test_f1_micro": 76.72413793103448,
"test_f1_weighted": 76.93520799124248,
"test_loss": 0.9065346121788025,
"test_runtime": 1.187,
"test_samples_per_second": 97.724,
"total_time_in_minutes": 39.80907916666666,
"train_mem_cpu_alloc_delta": 937439232,
"train_mem_cpu_peaked_delta": 40960,
"train_mem_gpu_alloc_delta": 40215552,
"train_mem_gpu_peaked_delta": 4624273408,
"train_runtime": 2386.5323,
"train_samples": 488,
"train_samples_per_second": 1.341
}