Upload 22 files
- llama_dora_commonsense_checkpoints/.DS_Store +0 -0
- llama_dora_commonsense_checkpoints/LLama-7B/.DS_Store +0 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r16/adapter_config.json +23 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r16/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r32/adapter_config.json +23 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r32/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r32_qkv/adapter_config.json +26 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r32_qkv/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r4/adapter_config.json +23 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r4/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r64/adapter_config.json +23 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r64/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r8/adapter_config.json +23 -0
- llama_dora_commonsense_checkpoints/LLama-7B/dora_r8/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama2-7B/dora_r16/adapter_config.json +24 -0
- llama_dora_commonsense_checkpoints/LLama2-7B/dora_r16/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama2-7B/dora_r32/adapter_config.json +24 -0
- llama_dora_commonsense_checkpoints/LLama2-7B/dora_r32/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama3-8B/dora_r16/adapter_config.json +24 -0
- llama_dora_commonsense_checkpoints/LLama3-8B/dora_r16/adapter_model.bin +3 -0
- llama_dora_commonsense_checkpoints/LLama3-8B/dora_r32/adapter_config.json +24 -0
- llama_dora_commonsense_checkpoints/LLama3-8B/dora_r32/adapter_model.bin +3 -0
llama_dora_commonsense_checkpoints/.DS_Store
ADDED
Binary file (6.15 kB)
llama_dora_commonsense_checkpoints/LLama-7B/.DS_Store
ADDED
Binary file (6.15 kB)
llama_dora_commonsense_checkpoints/LLama-7B/dora_r16/adapter_config.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "/root/llama_7b",
+  "bias": "none",
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 32,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "peft_type": "DORA",
+  "r": 16,
+  "dora_simple": true,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
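The config above applies DoRA at rank 16 to all attention and MLP projections, and its "peft_type": "DORA" appears to require the DoRA authors' fork of peft (upstream peft does not recognize that value, nor fields like "Wdecompose_target_modules" or "dora_simple"). A minimal loading sketch under that assumption, with the public yahma/llama-7b-hf checkpoint standing in for the local "/root/llama_7b" path recorded in the config:

```python
# Sketch only: assumes the DoRA fork of peft is installed, since upstream
# peft does not recognize "peft_type": "DORA".
import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "yahma/llama-7b-hf",          # public stand-in for "/root/llama_7b"
    torch_dtype=torch.float16,
)
model = PeftModel.from_pretrained(
    base, "llama_dora_commonsense_checkpoints/LLama-7B/dora_r16"
)
model.eval()
```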
llama_dora_commonsense_checkpoints/LLama-7B/dora_r16/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40c572e61ca8bd85502b8fcf0881add1d0b8aeefe21ef6c7c38a829b7fa59dd4
+size 115878093
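Each adapter_model.bin entry in this commit is a Git LFS pointer (spec v1), not the weights themselves: three lines giving the spec URL, the sha256 object id, and the byte size, with the actual blob stored in LFS. A small check that a fetched blob matches its pointer:

```python
# Verify a downloaded blob against an LFS pointer like the one above.
import hashlib
from pathlib import Path

def matches_pointer(pointer_text: str, blob_path: str) -> bool:
    fields = dict(
        line.split(" ", 1) for line in pointer_text.splitlines() if " " in line
    )
    oid = fields["oid"].split(":", 1)[1].strip()  # "sha256:<hex>" -> "<hex>"
    blob = Path(blob_path).read_bytes()
    return hashlib.sha256(blob).hexdigest() == oid and len(blob) == int(fields["size"])
```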
llama_dora_commonsense_checkpoints/LLama-7B/dora_r32/adapter_config.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "yahma/llama-7b-hf",
+  "bias": "none",
+  "dora_simple": true,
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 64,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "peft_type": "DORA",
+  "r": 32,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama-7B/dora_r32/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df43b5667456c1b16b61e1ab77228f9f2c69ece328a3e1bcb8c075d926c47c10
+size 228076170
llama_dora_commonsense_checkpoints/LLama-7B/dora_r32_qkv/adapter_config.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "Wdecompose_target_modules": [
+    "o_proj",
+    "gate_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "base_model_name_or_path": "/root/llama_7b",
+  "bias": "none",
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 64,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "peft_type": "DORA",
+  "r": 32,
+  "dora_simple": true,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
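Unlike the other configs, dora_r32_qkv restricts the low-rank DoRA update to q_proj/k_proj/v_proj, while Wdecompose_target_modules lists o_proj, gate_proj, up_proj, and down_proj; my reading of the DoRA fork (not stated in this commit) is that the latter field marks modules where only the magnitude component is tuned. A quick summary script under that assumption:

```python
# Summarize which modules get the full low-rank update vs. (assumed)
# magnitude-only tuning via Wdecompose_target_modules.
import json

path = "llama_dora_commonsense_checkpoints/LLama-7B/dora_r32_qkv/adapter_config.json"
with open(path) as f:
    cfg = json.load(f)

print("low-rank DoRA modules:", cfg["target_modules"])
print("Wdecompose modules:   ", cfg.get("Wdecompose_target_modules") or [])
print(f"r={cfg['r']}, alpha={cfg['lora_alpha']}, scaling={cfg['lora_alpha'] / cfg['r']}")
```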
llama_dora_commonsense_checkpoints/LLama-7B/dora_r32_qkv/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e9a5051d0c3750ab6edf22e0d9dfa5f2865793c4e18f56e5a345e528dbd0883
+size 106254733
llama_dora_commonsense_checkpoints/LLama-7B/dora_r4/adapter_config.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "/root/llama_7b",
+  "bias": "none",
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 8,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "peft_type": "DORA",
+  "r": 4,
+  "dora_simple": true,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama-7B/dora_r4/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:677bcf78a79b6eb46fc006ce0dd9987ac194d07bfaaf0504bc2e3e06ed66a3de
+size 31729229
llama_dora_commonsense_checkpoints/LLama-7B/dora_r64/adapter_config.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "yahma/llama-7b-hf",
+  "bias": "none",
+  "dora_simple": true,
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 128,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "peft_type": "DORA",
+  "r": 64,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama-7B/dora_r64/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f7c06ff45d099af201361eb2585a7a3d9db7757bfdb6c5bd52477b7c3477944
+size 452464269
llama_dora_commonsense_checkpoints/LLama-7B/dora_r8/adapter_config.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "/root/llama_7b",
+  "bias": "none",
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 16,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "peft_type": "DORA",
+  "r": 8,
+  "dora_simple": true,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama-7B/dora_r8/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:656455faa27b6c13bbeb3038728da9541f9e3546ad0a77881fde2bdc3035db93
+size 59778765
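A pattern worth noting across the LLama-7B entries above: every config sets lora_alpha = 2 * r, and the adapter blob size grows close to linearly with the rank (about 31.7 MB at r=4 up to about 452.5 MB at r=64), trailing an exact r-fold ratio slightly, which is consistent with a rank-independent constant term such as the magnitude vectors. A quick check using the sizes recorded in the pointers above:

```python
# Byte sizes copied from the LFS pointers above (LLama-7B, q/k/v/up/down targets).
sizes = {4: 31729229, 8: 59778765, 16: 115878093, 32: 228076170, 64: 452464269}
for r, n in sizes.items():
    # Ratio vs. r=4 trails r/4 slightly: part of the checkpoint is rank-independent.
    print(f"r={r:2d}  size={n}  ratio_vs_r4={n / sizes[4]:.2f}")
```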
llama_dora_commonsense_checkpoints/LLama2-7B/dora_r16/adapter_config.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "dora_simple": true,
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 32,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "only_tune_direction": false,
+  "peft_type": "DORA",
+  "r": 16,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama2-7B/dora_r16/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ff4fd3f852d1886a096e214745d90f3867e189868b72ff371ca852859e7b73a
+size 115878538
llama_dora_commonsense_checkpoints/LLama2-7B/dora_r32/adapter_config.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "dora_simple": true,
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 64,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "only_tune_direction": false,
+  "peft_type": "DORA",
+  "r": 32,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama2-7B/dora_r32/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4dc231bf296bea36fe700d50bd6ee259eba3d3595a0255beb1ffc97bdcdf0d6a
+size 228076170
llama_dora_commonsense_checkpoints/LLama3-8B/dora_r16/adapter_config.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B",
+  "bias": "none",
+  "dora_simple": true,
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 32,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "only_tune_direction": false,
+  "peft_type": "DORA",
+  "r": 16,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama3-8B/dora_r16/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ba2f6cb24fa2ac950d4c9820253ac35940d5a0fd40e84e1d9f9bdd1483877de
+size 116566538
llama_dora_commonsense_checkpoints/LLama3-8B/dora_r32/adapter_config.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "Wdecompose_target_modules": null,
+  "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B",
+  "bias": "none",
+  "dora_simple": true,
+  "enable_lora": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "lora_alpha": 64,
+  "lora_dropout": 0.05,
+  "merge_weights": false,
+  "modules_to_save": null,
+  "only_tune_direction": false,
+  "peft_type": "DORA",
+  "r": 32,
+  "target_modules": [
+    "q_proj",
+    "k_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
llama_dora_commonsense_checkpoints/LLama3-8B/dora_r32/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcadf51315854cdd1840b5f8ede293102b011fece0b87a7b97aa3b1870b10886
+size 229812746