Safetensors
mjbuehler committed on
Commit
764f6a1
1 Parent(s): 8ef4e7e

Update xlora_config.json

Browse files
Files changed (1) hide show
  1. xlora_config.json +9 -9
xlora_config.json CHANGED
@@ -1,15 +1,15 @@
1
  {
2
  "base_model_id": "HuggingFaceH4/zephyr-7b-beta",
3
  "adapters": {
4
- "adapter_1": "lamm-mit/x-lora/X-LoRA_adapters/1/",
5
- "adapter_2": "lamm-mit/x-lora/X-LoRA_adapters/2/",
6
- "adapter_3": "lamm-mit/x-lora/X-LoRA_adapters/3/",
7
- "adapter_4": "lamm-mit/x-lora/X-LoRA_adapters/4/",
8
- "adapter_5": "lamm-mit/x-lora/X-LoRA_adapters/5/",
9
- "adapter_6": "lamm-mit/x-lora/X-LoRA_adapters/6/",
10
- "adapter_7": "lamm-mit/x-lora/X-LoRA_adapters/7/",
11
- "adapter_8": "lamm-mit/x-lora/X-LoRA_adapters/8/",
12
- "adapter_9": "lamm-mit/x-lora/X-LoRA_adapters/9/"
13
  },
14
  "hidden_size": 4096,
15
  "enable_softmax": true,
 
1
  {
2
  "base_model_id": "HuggingFaceH4/zephyr-7b-beta",
3
  "adapters": {
4
+ "adapter_1": "lamm-mit/x-lora/adapter_1/",
5
+ "adapter_2": "lamm-mit/x-lora/adapter_2/",
6
+ "adapter_3": "lamm-mit/x-lora/adapter_3/",
7
+ "adapter_4": "lamm-mit/x-lora/adapter_4/",
8
+ "adapter_5": "lamm-mit/x-lora/adapter_5/",
9
+ "adapter_6": "lamm-mit/x-lora/adapter_6/",
10
+ "adapter_7": "lamm-mit/x-lora/adapter_7/",
11
+ "adapter_8": "lamm-mit/x-lora/adapter_8/",
12
+ "adapter_9": "lamm-mit/x-lora/adapter_9/"
13
  },
14
  "hidden_size": 4096,
15
  "enable_softmax": true,