Update xlora_config.json
xlora_config.json +9 -9
CHANGED
@@ -1,15 +1,15 @@
 {
     "base_model_id": "HuggingFaceH4/zephyr-7b-beta",
     "adapters": {
-        "adapter_1": "lamm-mit/x-lora/
-        "adapter_2": "lamm-mit/x-lora/
-        "adapter_3": "lamm-mit/x-lora/
-        "adapter_4": "lamm-mit/x-lora/
-        "adapter_5": "lamm-mit/x-lora/
-        "adapter_6": "lamm-mit/x-lora/
-        "adapter_7": "lamm-mit/x-lora/
-        "adapter_8": "lamm-mit/x-lora/
-        "adapter_9": "lamm-mit/x-lora/
+        "adapter_1": "lamm-mit/x-lora/adapter_1",
+        "adapter_2": "lamm-mit/x-lora/adapter_2/",
+        "adapter_3": "lamm-mit/x-lora/adapter_3/",
+        "adapter_4": "lamm-mit/x-lora/adapter_4/",
+        "adapter_5": "lamm-mit/x-lora/adapter_5/",
+        "adapter_6": "lamm-mit/x-lora/adapter_6/",
+        "adapter_7": "lamm-mit/x-lora/adapter_7/",
+        "adapter_8": "lamm-mit/x-lora/adapter_8/",
+        "adapter_9": "lamm-mit/x-lora/adapter_9/"
     },
     "hidden_size": 4096,
     "enable_softmax": true,
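For context, the change points every adapter entry at its own subfolder of the lamm-mit/x-lora repository instead of the previously truncated repo path. Below is a minimal, illustrative Python sketch (not part of the commit) showing how the updated entries can be read back and resolved into repo-relative subfolders; it assumes xlora_config.json sits at the root of lamm-mit/x-lora and uses only standard huggingface_hub utilities (hf_hub_download, list_repo_files).

import json

from huggingface_hub import hf_hub_download, list_repo_files

# Download the updated xlora_config.json from the model repo
# (assumed here to live at the repo root).
config_path = hf_hub_download(repo_id="lamm-mit/x-lora", filename="xlora_config.json")
with open(config_path) as f:
    config = json.load(f)

# List everything in the repo so each adapter subfolder can be checked for weight files.
repo_files = list_repo_files("lamm-mit/x-lora")

for name, path in config["adapters"].items():
    # Entries look like "lamm-mit/x-lora/adapter_2/"; strip the repo prefix and any
    # trailing slash to obtain the subfolder expected to hold the LoRA weights.
    subfolder = path.removeprefix("lamm-mit/x-lora/").strip("/")
    has_weights = any(
        f.startswith(subfolder + "/") and f.endswith((".safetensors", ".bin"))
        for f in repo_files
    )
    print(f"{name}: subfolder={subfolder!r}, weights_found={has_weights}")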