jikaixuan committed
Commit 63fe91b
1 Parent(s): e72591b

Training in progress, step 10

adapter_config.json CHANGED
@@ -8,21 +8,18 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "loftq_config": {},
   "lora_alpha": 16,
   "lora_dropout": 0.1,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 64,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "q_proj",
+    "o_proj",
     "v_proj",
-    "o_proj"
+    "k_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
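
The adapter_config.json change drops the loftq_config, megatron_config, and megatron_core keys and reorders target_modules; the LoRA hyperparameters themselves (r=64, lora_alpha=16, lora_dropout=0.1) are unchanged. For reference, a minimal sketch of a config matching the new file, assuming the Hugging Face peft library; this is not the repository's actual training script:

# Minimal sketch, assuming peft; only reproduces the fields shown in the diff.
from peft import LoraConfig

lora_config = LoraConfig(
    r=64,                  # LoRA rank, as in the updated adapter_config.json
    lora_alpha=16,         # scaling factor for the LoRA updates
    lora_dropout=0.1,      # dropout applied to the LoRA layers
    target_modules=["o_proj", "v_proj", "k_proj", "q_proj"],  # new ordering
    task_type="CAUSAL_LM",
)
# Saving an adapter built from this config (model.save_pretrained(...))
# writes an adapter_config.json with the keys shown above.
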
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8076e5dc03e12753400834d0702a6e4b2187c17f8da42401fa5933c645e14246
-size 109086672
+oid sha256:305d6778243080218eefa49126ed3c002ed2be7639be758082b3b3a43d81593e
+size 218138576
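
adapter_model.safetensors is tracked with Git LFS, so the repository stores only this pointer (spec version, sha256 oid, and byte size); the new pointer records a file roughly twice the previous size. A minimal sketch, assuming the file has been downloaded locally under the same name, for checking a download against the pointer values:

# Minimal sketch: verify a local file against its Git LFS pointer.
# The local path is an assumption; oid and size are taken from the diff above.
import hashlib
import os

path = "adapter_model.safetensors"
expected_oid = "305d6778243080218eefa49126ed3c002ed2be7639be758082b3b3a43d81593e"
expected_size = 218138576

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch with LFS pointer"
assert digest.hexdigest() == expected_oid, "sha256 mismatch with LFS pointer"
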
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0cc76df2851d4146f789fd37f9db61180778b9f7f4bb177bcab529cc595e76d3
+oid sha256:3d24033a72c9e150be8c7752cf8fdcf8e3b812ddac50133cb79a143c41719150
 size 4792