jikaixuan committed
Commit 545dc89
1 Parent(s): 4594ddf

Training in progress, step 10

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
+  "base_model_name_or_path": "alignment-handbook/zephyr-7b-sft-full",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -19,10 +19,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
     "o_proj",
     "v_proj",
-    "q_proj"
+    "q_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
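
For reference, a minimal sketch (not part of this commit) of how the updated adapter_config.json could be rebuilt with PEFT's LoraConfig. Only fields visible in the diff are set; values not shown here (r, lora_alpha, lora_dropout, etc.) are left at the library defaults, and "adapter_dir" is a hypothetical output path.

# Minimal sketch, assuming the peft library is installed.
from peft import LoraConfig

lora_config = LoraConfig(
    base_model_name_or_path="alignment-handbook/zephyr-7b-sft-full",
    target_modules=["o_proj", "v_proj", "q_proj", "k_proj"],
    task_type="CAUSAL_LM",
    bias="none",
    fan_in_fan_out=False,
    inference_mode=True,
)
# Writes an adapter_config.json like the one in this commit (remaining fields at defaults).
lora_config.save_pretrained("adapter_dir")
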
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:00939d9330e0ca755ea426cd1027d9495c60be5fc0f3709bd197a1640cb76326
+oid sha256:9407ea6e7e1bd3d0cfa121dd4f94b543e6b391f9ee725c293b7516cf81ee6f67
 size 109086672
runs/Jan19_16-29-15_uclaml03.cs.ucla.edu/events.out.tfevents.1705710616.uclaml03.cs.ucla.edu.481426.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c0ede911e6beb9b69a04a2087176e34f1c7ac207982cc6e2f4cadbd6e39db84
+size 5148
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3939afbabc4baeb60d396786c0ae42b352034e4fb51ab2354f56d3c071536d9f
+oid sha256:28c34aade35021f11c4769f94628f1f2c94899f68a677daeb0e07c6fb4792853
 size 4792