PaulD committed
Commit a26a8f7
1 Parent(s): d95b320

End of training

README.md CHANGED
@@ -18,13 +18,13 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [meta-llama/Meta-Llama-3-8B-Instruct](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct) on the None dataset.
 It achieves the following results on the evaluation set:
- - Loss: 1.9373
- - Eval/rewards/chosen: 1.7801
- - Eval/logps/chosen: -189.7792
- - Eval/rewards/rejected: 1.3841
- - Eval/logps/rejected: -207.7363
- - Eval/rewards/margins: 0.3960
- - Eval/kl: 13.1057
+ - Loss: 1.6916
+ - Eval/rewards/chosen: -0.4209
+ - Eval/logps/chosen: -211.7889
+ - Eval/rewards/rejected: -0.4483
+ - Eval/logps/rejected: -226.0602
+ - Eval/rewards/margins: 0.0274
+ - Eval/kl: 0.0
 
 ## Model description
 
@@ -43,7 +43,7 @@ More information needed
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
- - learning_rate: 0.0001
+ - learning_rate: 1e-05
 - train_batch_size: 1
 - eval_batch_size: 2
 - seed: 9012
@@ -56,14 +56,14 @@ The following hyperparameters were used during training:
 
 ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Eval/kl |
- |:-------------:|:------:|:----:|:---------------:|:-------:|
- | 0.8263 | 0.9412 | 12 | 0.7743 | 0.0 |
- | 0.827 | 1.9608 | 25 | 0.7659 | 0.0 |
- | 0.4971 | 2.9804 | 38 | 1.9715 | 9.9657 |
- | 0.2863 | 4.0 | 51 | 1.9235 | 11.1631 |
- | 0.2741 | 4.9412 | 63 | 1.9366 | 13.0766 |
- | 0.428 | 5.6471 | 72 | 1.9373 | 13.1057 |
+ | Training Loss | Epoch | Step | Validation Loss | Eval/kl |
+ |:-------------:|:------:|:----:|:---------------:|:------:|
+ | 1.074 | 0.9412 | 12 | 1.9606 | 0.0013 |
+ | 1.0223 | 1.9608 | 25 | 1.8546 | 0.0 |
+ | 0.9832 | 2.9804 | 38 | 1.7737 | 0.0 |
+ | 0.9095 | 4.0 | 51 | 1.6998 | 0.0 |
+ | 0.9155 | 4.9412 | 63 | 1.6833 | 0.0 |
+ | 0.5901 | 5.6471 | 72 | 1.6916 | 0.0 |
 
 
 ### Framework versions
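For context (not part of the diff): a minimal sketch of how a PEFT/LoRA adapter such as this one is typically loaded on top of the base model with `transformers` and `peft`. The adapter repository id `your-namespace/this-adapter` is a placeholder, and device/dtype handling is omitted.

```python
# Minimal sketch, assuming transformers and peft are installed.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "meta-llama/Meta-Llama-3-8B-Instruct"
adapter_id = "your-namespace/this-adapter"  # hypothetical placeholder for this repo's id

tokenizer = AutoTokenizer.from_pretrained(base_id)
base_model = AutoModelForCausalLM.from_pretrained(base_id)
# Applies adapter_model.safetensors according to adapter_config.json.
model = PeftModel.from_pretrained(base_model, adapter_id)
```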
adapter_config.json CHANGED
@@ -20,10 +20,10 @@
 "rank_pattern": {},
 "revision": null,
 "target_modules": [
+ "o_proj",
 "q_proj",
- "v_proj",
 "k_proj",
- "o_proj"
+ "v_proj"
 ],
 "task_type": "CAUSAL_LM",
 "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:35c7d33d32a283cbdb481d2da12a823358c4a1563dd3875722c78b034cf5aa62
+ oid sha256:0d4f4801933fb10ed34a020af01f57536309084c7d678224b7c23af10b7bcf2a
 size 27297544
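The weights file is stored through Git LFS, so the diff only updates the pointer's `oid sha256` (the hash of the new adapter weights) while the size stays the same. A sketch (assumption, not part of the repo) of verifying a downloaded copy against the new pointer:

```python
import hashlib

def file_sha256(path: str) -> str:
    # Stream the file in 1 MiB chunks to avoid loading it fully into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "0d4f4801933fb10ed34a020af01f57536309084c7d678224b7c23af10b7bcf2a"
assert file_sha256("adapter_model.safetensors") == expected
```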
metrics.jsonl CHANGED
@@ -1,126 +1,5 @@
- {"epoch": 0.96, "precision": 0.6428571405612244, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.0, "precision": 0.749999996875, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.96, "precision": 0.8888888839506173, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 3.84, "precision": 0.8421052587257617, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 0.96, "precision": 0.4324324312636961, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 2.0, "precision": 0.4857142843265306, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 2.96, "precision": 0.586206894530321, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 3.84, "precision": 0.45454545316804407, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 0.96, "precision": 0.45945945821767714, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 2.0, "precision": 0.7368421013850416, "recall": 0.7777777734567901, "fold": 0}
- {"epoch": 2.96, "precision": 0.7083333303819445, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 3.84, "precision": 0.6799999972799999, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 0.96, "precision": 0.37499999921875, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.0, "precision": 0.3529411757785467, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.96, "precision": 0.3333333326797386, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 3.84, "precision": 0.33333333271604937, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 0.9411764705882353, "precision": 0.47058823437139563, "recall": 0.9999999958333333, "fold": 0}
- {"epoch": 1.9607843137254903, "precision": 0.6666666646464646, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 2.980392156862745, "precision": 0.6451612882414152, "recall": 0.8333333298611111, "fold": 0}
- {"epoch": 3.764705882352941, "precision": 0.6451612882414152, "recall": 0.8333333298611111, "fold": 0}
- {"epoch": 0.9411764705882353, "precision": 0.4313725481737793, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 1.9607843137254903, "precision": 0.4999999989130435, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 2.980392156862745, "precision": 0.4693877541441066, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 3.764705882352941, "precision": 0.4423076914571006, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 0.96, "precision": 0.5624999982421874, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.0, "precision": 0.749999996875, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.96, "precision": 0.9999999933333334, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 4.0, "precision": 0.9999999923076924, "recall": 0.7222222182098765, "fold": 0}
- {"epoch": 4.96, "precision": 0.9999999928571429, "recall": 0.7777777734567901, "fold": 0}
- {"epoch": 5.76, "precision": 0.8888888839506173, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 0.96, "precision": 0.4324324312636961, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 2.0, "precision": 0.5142857128163265, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.96, "precision": 0.51515151359045, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 4.0, "precision": 0.4444444432098765, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 4.96, "precision": 0.5624999982421874, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 5.76, "precision": 0.5624999982421874, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 0.96, "precision": 0.7368421013850416, "recall": 0.7777777734567901, "fold": 0}
- {"epoch": 2.0, "precision": 0.9166666590277779, "recall": 0.6111111077160494, "fold": 0}
- {"epoch": 2.96, "precision": 0.9090909008264464, "recall": 0.5555555524691358, "fold": 0}
- {"epoch": 4.0, "precision": 0.5999999976, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 4.96, "precision": 0.8571428530612245, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 5.76, "precision": 0.6521739102079395, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 0.96, "precision": 0.4736842092797784, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.0, "precision": 0.4615384603550296, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.96, "precision": 0.4999999984375, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 4.0, "precision": 0.5454545438016529, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 4.96, "precision": 0.49999999852941174, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 5.76, "precision": 0.47058823391003457, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 0.96, "precision": 0.7999999946666667, "recall": 0.666666662962963, "fold": 0}
- {"epoch": 2.0, "precision": 0.9999999500000026, "recall": 0.11111111049382716, "fold": 0}
- {"epoch": 2.96, "precision": 0.0, "recall": 0.0, "fold": 0}
- {"epoch": 4.0, "precision": 0.0, "recall": 0.0, "fold": 0}
- {"epoch": 4.96, "precision": 0.0, "recall": 0.0, "fold": 0}
- {"epoch": 5.76, "precision": 0.0, "recall": 0.0, "fold": 0}
- {"epoch": 0.96, "precision": 0.4285714275510204, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.0, "precision": 0.45945945821767714, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 2.96, "precision": 0.5294117631487889, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 4.0, "precision": 0.5624999982421874, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 4.96, "precision": 0.6153846130177515, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 5.76, "precision": 0.5999999976, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 0.96, "precision": 0.4210526304709141, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 2.0, "precision": 0.8461538396449705, "recall": 0.6111111077160494, "fold": 0}
- {"epoch": 2.96, "precision": 0.5714285693877551, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 4.0, "precision": 0.6363636334710744, "recall": 0.7777777734567901, "fold": 0}
- {"epoch": 4.96, "precision": 0.6666666638888888, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 5.76, "precision": 0.5517241360285374, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 0.96, "precision": 0.28846153790680473, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 2.0, "precision": 0.38297872258940696, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.96, "precision": 0.33333333259259257, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 4.0, "precision": 0.3673469380258226, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 4.96, "precision": 0.3617021268899955, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 5.76, "precision": 0.33999999931999997, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 0.96, "precision": 0.4999999984375, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 2.0, "precision": 0.5333333315555555, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 2.96, "precision": 0.6799999972799999, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 4.0, "precision": 0.6999999965, "recall": 0.7777777734567901, "fold": 0}
- {"epoch": 4.96, "precision": 0.5999999976, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 5.76, "precision": 0.6799999972799999, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 0.96, "precision": 0.34042553119058394, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 2.0, "precision": 0.37499999921875, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 2.96, "precision": 0.35999999928, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 4.0, "precision": 0.3333333326797386, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 4.96, "precision": 0.3478260862003781, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 5.76, "precision": 0.3673469380258226, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 0.96, "precision": 0.4411764692906574, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 2.0, "precision": 0.5925925903978052, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 2.96, "precision": 0.7272727239669421, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 4.0, "precision": 0.5714285693877551, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 4.96, "precision": 0.5652173888468809, "recall": 0.7222222182098765, "fold": 0}
- {"epoch": 5.76, "precision": 0.6666666634920635, "recall": 0.7777777734567901, "fold": 0}
- {"epoch": 0.96, "precision": 0.357142856292517, "recall": 0.8333333287037037, "fold": 0}
- {"epoch": 2.0, "precision": 0.35416666592881946, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 2.96, "precision": 0.3913043469754253, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 4.0, "precision": 0.3809523800453515, "recall": 0.8888888839506173, "fold": 0}
- {"epoch": 4.96, "precision": 0.39534883628988643, "recall": 0.9444444391975308, "fold": 0}
- {"epoch": 5.76, "precision": 0.37499999921875, "recall": 0.9999999944444444, "fold": 0}
- {"epoch": 0.9411764705882353, "precision": 0.47826086852551986, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 1.9607843137254903, "precision": 0.6486486468955441, "recall": 0.9999999958333333, "fold": 0}
- {"epoch": 2.980392156862745, "precision": 0.7916666633680556, "recall": 0.7916666633680556, "fold": 0}
- {"epoch": 4.0, "precision": 0.758620687039239, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 4.9411764705882355, "precision": 0.758620687039239, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 5.647058823529412, "precision": 0.6785714261479592, "recall": 0.7916666633680556, "fold": 0}
- {"epoch": 0.9411764705882353, "precision": 0.4035087712219144, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 1.9607843137254903, "precision": 0.49999999886363633, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 2.980392156862745, "precision": 0.45098039127258743, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 4.0, "precision": 0.46808510538705295, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 4.9411764705882355, "precision": 0.4259259251371742, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 5.647058823529412, "precision": 0.4693877541441066, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 0.9411764705882353, "precision": 0.5111111099753086, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 1.9607843137254903, "precision": 0.5483870950052029, "recall": 0.7083333303819445, "fold": 0}
- {"epoch": 2.980392156862745, "precision": 0.6785714261479592, "recall": 0.7916666633680556, "fold": 0}
- {"epoch": 4.0, "precision": 0.7096774170655566, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 4.9411764705882355, "precision": 0.7916666633680556, "recall": 0.7916666633680556, "fold": 0}
- {"epoch": 5.647058823529412, "precision": 0.7999999968, "recall": 0.8333333298611111, "fold": 0}
- {"epoch": 0.9411764705882353, "precision": 0.4444444436213992, "recall": 0.9999999958333333, "fold": 0}
- {"epoch": 1.9607843137254903, "precision": 0.47916666566840277, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 2.980392156862745, "precision": 0.45652173813799624, "recall": 0.8749999963541666, "fold": 0}
- {"epoch": 4.0, "precision": 0.4230769222633136, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 4.9411764705882355, "precision": 0.41999999915999997, "recall": 0.8749999963541666, "fold": 0}
- {"epoch": 5.647058823529412, "precision": 0.47916666566840277, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 0.9411764705882353, "precision": 0.5238095225623582, "recall": 0.9166666628472222, "fold": 0}
- {"epoch": 1.9607843137254903, "precision": 0.6571428552653061, "recall": 0.9583333293402777, "fold": 0}
- {"epoch": 2.980392156862745, "precision": 0.47058823437139563, "recall": 0.9999999958333333, "fold": 0}
- {"epoch": 4.0, "precision": 0.7407407379972565, "recall": 0.8333333298611111, "fold": 0}
- {"epoch": 4.9411764705882355, "precision": 0.6896551700356718, "recall": 0.8333333298611111, "fold": 0}
- {"epoch": 5.647058823529412, "precision": 0.7037037010973937, "recall": 0.7916666633680556, "fold": 0}
+ {"epoch": 1.9607843137254903, "precision": 0.4897959173677634, "recall": 0.9999999958333333, "fold": 0}
+ {"epoch": 2.980392156862745, "precision": 0.48888888780246914, "recall": 0.9166666628472222, "fold": 0}
+ {"epoch": 4.0, "precision": 0.45999999908, "recall": 0.9583333293402777, "fold": 0}
+ {"epoch": 4.9411764705882355, "precision": 0.47058823437139563, "recall": 0.9999999958333333, "fold": 0}
+ {"epoch": 5.647058823529412, "precision": 0.4038461530695266, "recall": 0.8749999963541666, "fold": 0}
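A sketch (assumption, not shipped with the repo) of consuming `metrics.jsonl`: each line is one evaluation record with `epoch`, `precision`, `recall`, and `fold`, from which F1 can be derived.

```python
import json

# Read one JSON object per non-empty line.
with open("metrics.jsonl") as f:
    records = [json.loads(line) for line in f if line.strip()]

for rec in records:
    p, r = rec["precision"], rec["recall"]
    f1 = 2 * p * r / (p + r) if (p + r) > 0 else 0.0
    print(f"fold={rec['fold']} epoch={rec['epoch']:.2f} "
          f"precision={p:.3f} recall={r:.3f} f1={f1:.3f}")
```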
metrics_epoch_1.9607843137254903_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
@@ -0,0 +1 @@
+ {"epoch": 1.9607843137254903, "precision": 0.4897959173677634, "recall": 0.9999999958333333, "fold": 0}
metrics_epoch_2.980392156862745_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
@@ -0,0 +1 @@
+ {"epoch": 2.980392156862745, "precision": 0.48888888780246914, "recall": 0.9166666628472222, "fold": 0}
metrics_epoch_4.0_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
@@ -0,0 +1 @@
+ {"epoch": 4.0, "precision": 0.45999999908, "recall": 0.9583333293402777, "fold": 0}
metrics_epoch_4.9411764705882355_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
@@ -0,0 +1 @@
+ {"epoch": 4.9411764705882355, "precision": 0.47058823437139563, "recall": 0.9999999958333333, "fold": 0}
metrics_epoch_5.647058823529412_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
@@ -0,0 +1 @@
+ {"epoch": 5.647058823529412, "precision": 0.4038461530695266, "recall": 0.8749999963541666, "fold": 0}
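The per-checkpoint metric files encode epoch, fold, learning rate, seed, and weight in their names. A sketch (assumption) of recovering those fields from a filename:

```python
import re

# Example: metrics_epoch_4.0_fold_0_lr_1e-05_seed_9012_weight_10.0.json
pattern = re.compile(
    r"metrics_epoch_(?P<epoch>[\d.]+)_fold_(?P<fold>\d+)_lr_(?P<lr>[\deE.-]+)"
    r"_seed_(?P<seed>\d+)_weight_(?P<weight>[\d.]+)\.json"
)
m = pattern.match("metrics_epoch_4.0_fold_0_lr_1e-05_seed_9012_weight_10.0.json")
print(m.groupdict())
# {'epoch': '4.0', 'fold': '0', 'lr': '1e-05', 'seed': '9012', 'weight': '10.0'}
```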
results_epoch_1.9607843137254903_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
The diff for this file is too large to render. See raw diff
 
results_epoch_2.980392156862745_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
The diff for this file is too large to render. See raw diff
 
results_epoch_4.0_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
The diff for this file is too large to render. See raw diff
 
results_epoch_4.9411764705882355_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
The diff for this file is too large to render. See raw diff
 
results_epoch_5.647058823529412_fold_0_lr_1e-05_seed_9012_weight_10.0.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:02fd3c6ce3518dfa2edf93ab61e2c61d6105c19d097f9404884b700243c420da
+ oid sha256:12c8fd0a4b73198cfb183f8b9c6d47cfa697f16811760ffce0155941f1d89d93
 size 5688
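`training_args.bin` is the pickled training-arguments object saved by the Hugging Face `Trainer`; a sketch (assumption) of inspecting it:

```python
import torch

# weights_only=False because this is a pickled config object, not a tensor file.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.seed)
```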