xianchaowu committed on
Commit
ed1b13e
1 Parent(s): 8b0c479

upload lazy lora adapter for llama-2 70b hf

Files changed (4)
  1. README.md +157 -0
  2. adapter_config.json +596 -0
  3. adapter_model.bin +3 -0
  4. usage.py +51 -0
README.md CHANGED
@@ -1,3 +1,160 @@
---
license: llama2
---

## Lazy LoRA

### Benefits

0. Uses the updated [Meta's LLaMA-2 models](https://huggingface.co/meta-llama/Llama-2-70b-hf).
1. Supports [4-bit qlora](https://arxiv.org/abs/2305.14314), giving large savings in GPU memory and inference time.
2. Comparable MMLU results: 68.21% versus the 68.9% reported for llama2-70b (-0.69%).
3. This lazy-lora adapter is based on [Meta's LLaMA-2-70b-hf](https://huggingface.co/meta-llama/Llama-2-70b-hf) and was trained on the [oasst1 dataset](https://huggingface.co/datasets/OpenAssistant/oasst1), following [Guanaco](https://huggingface.co/timdettmers/guanaco-65b).

### Introduction

Lazy LoRA determines the rank of each LoRA layer from the singular values of the pretrained weight matrices (a rough sketch of this idea is given below). It also combines the following methods in one model:

1. LoRA: [LoRA: Low-Rank Adaptation of Large Language Models](https://arxiv.org/abs/2106.09685)
2. Prefix Tuning: [Prefix-Tuning: Optimizing Continuous Prompts for Generation](https://aclanthology.org/2021.acl-long.353/), [P-Tuning v2: Prompt Tuning Can Be Comparable to Fine-tuning Universally Across Scales and Tasks](https://arxiv.org/pdf/2110.07602.pdf)
3. Prompt Tuning: [The Power of Scale for Parameter-Efficient Prompt Tuning](https://arxiv.org/abs/2104.08691)
4. LLaMA-Adapter: [LLaMA-Adapter: Efficient Fine-tuning of Language Models with Zero-init Attention](https://arxiv.org/abs/2303.16199)

This allows you to perform LoRA (additional low-rank adapters inserted into each linear layer) and prompt learning (additional virtual tokens attached to the input and to the attention layers, acting as `past_key_values`).

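As a rough illustration of the rank-by-SVD idea (the exact rule lives in the lazy-lora fork of `peft` referenced in the Usage section and may differ), one could keep, for each weight matrix, however many singular values are needed to cover a chosen fraction of the spectrum and use that count as the module's LoRA rank. Everything below, including the `choose_rank` helper and the 0.5 energy threshold, is an illustrative assumption, not the repository's actual code:

```python
# Illustrative only: approximate "rank by SVD" for one pretrained weight matrix.
import torch

def choose_rank(weight: torch.Tensor, base_r: int = 64, energy: float = 0.5) -> int:
    # Hypothetical criterion: smallest k whose top-k singular values account for
    # `energy` of the total spectral mass, clipped to a band around the base rank.
    s = torch.linalg.svdvals(weight.float())    # singular values, descending
    cum = torch.cumsum(s, dim=0) / s.sum()      # cumulative spectral mass
    k = int((cum < energy).sum().item()) + 1
    return max(base_r // 2, min(2 * base_r, k))

# Example with a random stand-in weight (a real q_proj in llama2-70b is 8192x8192).
w = torch.randn(1024, 1024)
print(choose_rank(w))
```

This mirrors the `r_by_module_dict` in `adapter_config.json`, where the base rank `r` is 64 but individual modules end up with ranks roughly between 26 and 89.
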
## Usage

```python
import sys
sys.path.insert(1, '/workspace/asr/peft/src')
# TODO set this path to the lazy-lora source code path,
# or you can install it from source code:
# TODO, please install lazylora for usage:
# git clone [email protected]:Xianchao-Wu/peft.git
# cd peft
# python setup.py install

from transformers import (AutoTokenizer,
    AutoModelForCausalLM, BitsAndBytesConfig)
from peft import PeftModel, PeftConfig
import os
import torch

#import ipdb; ipdb.set_trace()
cache_dir = "/workspace/asr/peft/qlora"
# TODO set this cache_dir to the path where you
# stored (or want to store) the llama2-70b-hf model

lazylora_dir = os.getcwd()
# the path that contains 'adapter_config.json'
# and 'adapter_model.bin'

config = PeftConfig.from_pretrained(lazylora_dir)

tokenizer = AutoTokenizer.from_pretrained(
    config.base_model_name_or_path,
    cache_dir=cache_dir,
    use_auth_token=True
)

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type='nf4',
    bnb_4bit_compute_dtype=torch.bfloat16
)

model = AutoModelForCausalLM.from_pretrained(
    config.base_model_name_or_path,
    quantization_config=bnb_config,
    device_map="auto",
    cache_dir=cache_dir,
    use_auth_token=True
)
#model.print_trainable_parameters()
print(sum(p.numel() for p in model.parameters()))
# 34,751,127,552 -> about half of 70B, since 4-bit weights are stored packed

model = PeftModel.from_pretrained(model, lazylora_dir)
print('after adding lazy lora parameters:')
model.print_trainable_parameters()
# trainable params: 0 || all params: 35,579,442,176 || trainable%: 0.0
```

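Once the base model and adapter are loaded as above, generation goes through the standard `transformers` API. A minimal sketch; the `### Human:` / `### Assistant:` prompt template is an assumption borrowed from Guanaco-style chat models, not something this repository specifies:

```python
# Assumed Guanaco-style prompt template; adjust to the format you actually use.
prompt = "### Human: Explain what a LoRA adapter is.### Assistant:"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
with torch.no_grad():
    output_ids = model.generate(
        **inputs,
        max_new_tokens=128,
        do_sample=True,
        top_p=0.9,
        temperature=0.7,
    )
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```
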
## MMLU result:

```json
{"mmlu_loss": 2.3140328107200987,
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
"mmlu_eval_accuracy_high_school_chemistry": 0.5,
"mmlu_eval_accuracy_college_physics": 0.45454545454545453,
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
"mmlu_eval_accuracy_nutrition": 0.696969696969697,
"mmlu_eval_accuracy_world_religions": 0.8947368421052632,
"mmlu_eval_accuracy_medical_genetics": 1.0,
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
"mmlu_eval_accuracy_anatomy": 0.5,
"mmlu_eval_accuracy_sociology": 1.0,
"mmlu_eval_accuracy_human_sexuality": 0.5833333333333334,
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
"mmlu_eval_accuracy_jurisprudence": 0.7272727272727273,
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
"mmlu_eval_accuracy_college_biology": 0.8125,
"mmlu_eval_accuracy_machine_learning": 0.5454545454545454,
"mmlu_eval_accuracy_us_foreign_policy": 1.0,
"mmlu_eval_accuracy_high_school_microeconomics": 0.7692307692307693,
"mmlu_eval_accuracy_high_school_us_history": 1.0,
"mmlu_eval_accuracy_security_studies": 0.7777777777777778,
"mmlu_eval_accuracy_college_chemistry": 0.25,
"mmlu_eval_accuracy_college_computer_science": 0.5454545454545454,
"mmlu_eval_accuracy_miscellaneous": 0.7790697674418605,
"mmlu_eval_accuracy_professional_accounting": 0.7419354838709677,
"mmlu_eval_accuracy_business_ethics": 0.7272727272727273,
"mmlu_eval_accuracy_electrical_engineering": 0.5625,
"mmlu_eval_accuracy_elementary_mathematics": 0.4878048780487805,
"mmlu_eval_accuracy_high_school_biology": 0.71875,
"mmlu_eval_accuracy_college_mathematics": 0.45454545454545453,
"mmlu_eval_accuracy_high_school_european_history": 0.7777777777777778,
"mmlu_eval_accuracy_professional_law": 0.5588235294117647,
"mmlu_eval_accuracy_prehistory": 0.8,
"mmlu_eval_accuracy_high_school_macroeconomics": 0.7674418604651163,
"mmlu_eval_accuracy_formal_logic": 0.42857142857142855,
"mmlu_eval_accuracy_philosophy": 0.7941176470588235,
"mmlu_eval_accuracy_astronomy": 0.75,
"mmlu_eval_accuracy_clinical_knowledge": 0.7586206896551724,
"mmlu_eval_accuracy_global_facts": 0.5,
"mmlu_eval_accuracy_high_school_government_and_politics": 0.9523809523809523,
"mmlu_eval_accuracy_moral_disputes": 0.6842105263157895,
"mmlu_eval_accuracy_econometrics": 0.5,
"mmlu_eval_accuracy_management": 0.9090909090909091,
"mmlu_eval_accuracy_high_school_psychology": 0.9666666666666667,
"mmlu_eval_accuracy_high_school_geography": 0.9090909090909091,
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
"mmlu_eval_accuracy_logical_fallacies": 0.7222222222222222,
"mmlu_eval_accuracy_moral_scenarios": 0.49,
"mmlu_eval_accuracy_conceptual_physics": 0.5384615384615384,
"mmlu_eval_accuracy_professional_psychology": 0.782608695652174,
"mmlu_eval_accuracy_college_medicine": 0.7727272727272727,
"mmlu_eval_accuracy_high_school_physics": 0.11764705882352941,
"mmlu_eval_accuracy_computer_security": 0.7272727272727273,
"mmlu_eval_accuracy_virology": 0.5555555555555556,
"mmlu_eval_accuracy_professional_medicine": 0.7741935483870968,
"mmlu_eval_accuracy_marketing": 0.96,
"mmlu_eval_accuracy_public_relations": 0.6666666666666666,
"mmlu_eval_accuracy_high_school_statistics": 0.5652173913043478,
"mmlu_eval_accuracy": 0.682100004303323,
"epoch": 1.7}
```

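The overall `mmlu_eval_accuracy` appears to be the unweighted mean of the 57 per-subject accuracies (as in the QLoRA evaluation script). A quick, illustrative check, assuming the JSON block above has been saved to a file named `mmlu_result.json`:

```python
import json

with open("mmlu_result.json") as f:   # hypothetical file holding the JSON above
    result = json.load(f)

subject_scores = [v for k, v in result.items() if k.startswith("mmlu_eval_accuracy_")]
print(len(subject_scores), sum(subject_scores) / len(subject_scores))  # 57 subjects, ~0.682
```
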
## License and intended use

This lazy-lora adapter is based on [Meta's LLaMA-2-70b-hf](https://huggingface.co/meta-llama/Llama-2-70b-hf) and was trained on the [oasst1 dataset](https://huggingface.co/datasets/OpenAssistant/oasst1), following [Guanaco](https://huggingface.co/timdettmers/guanaco-65b).

The lazy lora adapter weights are available under the LLAMA-2 license. Note that using the lazy lora adapter weights requires access to the LLaMA-2 model weights. Since lazy lora is based on LLaMA-2, it should be used according to the LLaMA-2 license.

## Risks and Biases

The model can produce factually incorrect output and should not be relied on to produce factually accurate information. Because it was trained on various public datasets, it is possible that this model could generate lewd, biased, or otherwise offensive outputs.

adapter_config.json ADDED
@@ -0,0 +1,596 @@
1
+ {
2
+ "base_model_name_or_path": "meta-llama/Llama-2-70b-hf",
3
+ "bias": "none",
4
+ "fan_in_fan_out": false,
5
+ "inference_mode": true,
6
+ "init_lazy_lora_weights": true,
7
+ "is_r_by_svd": true,
8
+ "is_r_reuse": true,
9
+ "lazy_lora_alpha": 16.0,
10
+ "lazy_lora_dropout": 0.05,
11
+ "lazy_pre_adapter_type": "none",
12
+ "lazy_pre_lora_alpha": 0.1,
13
+ "modules_to_save": null,
14
+ "num_attention_heads": 64,
15
+ "num_layers": 80,
16
+ "num_transformer_submodules": 1,
17
+ "num_virtual_tokens": null,
18
+ "peft_type": "LAZY_LORA",
19
+ "prefix_tuning_config": null,
20
+ "prompt_tuning_config": null,
21
+ "r": 64,
22
+ "r_by_module_dict": {
23
+ "model.layers.0.mlp.down_proj": 44,
24
+ "model.layers.0.mlp.gate_proj": 30,
25
+ "model.layers.0.mlp.up_proj": 31,
26
+ "model.layers.0.self_attn.k_proj": 39,
27
+ "model.layers.0.self_attn.o_proj": 29,
28
+ "model.layers.0.self_attn.q_proj": 26,
29
+ "model.layers.0.self_attn.v_proj": 35,
30
+ "model.layers.1.mlp.down_proj": 57,
31
+ "model.layers.1.mlp.gate_proj": 42,
32
+ "model.layers.1.mlp.up_proj": 46,
33
+ "model.layers.1.self_attn.k_proj": 62,
34
+ "model.layers.1.self_attn.o_proj": 36,
35
+ "model.layers.1.self_attn.q_proj": 41,
36
+ "model.layers.1.self_attn.v_proj": 34,
37
+ "model.layers.10.mlp.down_proj": 63,
38
+ "model.layers.10.mlp.gate_proj": 64,
39
+ "model.layers.10.mlp.up_proj": 64,
40
+ "model.layers.10.self_attn.k_proj": 67,
41
+ "model.layers.10.self_attn.o_proj": 60,
42
+ "model.layers.10.self_attn.q_proj": 68,
43
+ "model.layers.10.self_attn.v_proj": 56,
44
+ "model.layers.11.mlp.down_proj": 63,
45
+ "model.layers.11.mlp.gate_proj": 64,
46
+ "model.layers.11.mlp.up_proj": 64,
47
+ "model.layers.11.self_attn.k_proj": 76,
48
+ "model.layers.11.self_attn.o_proj": 59,
49
+ "model.layers.11.self_attn.q_proj": 74,
50
+ "model.layers.11.self_attn.v_proj": 52,
51
+ "model.layers.12.mlp.down_proj": 63,
52
+ "model.layers.12.mlp.gate_proj": 64,
53
+ "model.layers.12.mlp.up_proj": 64,
54
+ "model.layers.12.self_attn.k_proj": 81,
55
+ "model.layers.12.self_attn.o_proj": 58,
56
+ "model.layers.12.self_attn.q_proj": 77,
57
+ "model.layers.12.self_attn.v_proj": 53,
58
+ "model.layers.13.mlp.down_proj": 63,
59
+ "model.layers.13.mlp.gate_proj": 63,
60
+ "model.layers.13.mlp.up_proj": 64,
61
+ "model.layers.13.self_attn.k_proj": 74,
62
+ "model.layers.13.self_attn.o_proj": 62,
63
+ "model.layers.13.self_attn.q_proj": 75,
64
+ "model.layers.13.self_attn.v_proj": 51,
65
+ "model.layers.14.mlp.down_proj": 63,
66
+ "model.layers.14.mlp.gate_proj": 63,
67
+ "model.layers.14.mlp.up_proj": 64,
68
+ "model.layers.14.self_attn.k_proj": 76,
69
+ "model.layers.14.self_attn.o_proj": 61,
70
+ "model.layers.14.self_attn.q_proj": 80,
71
+ "model.layers.14.self_attn.v_proj": 51,
72
+ "model.layers.15.mlp.down_proj": 63,
73
+ "model.layers.15.mlp.gate_proj": 63,
74
+ "model.layers.15.mlp.up_proj": 64,
75
+ "model.layers.15.self_attn.k_proj": 68,
76
+ "model.layers.15.self_attn.o_proj": 64,
77
+ "model.layers.15.self_attn.q_proj": 70,
78
+ "model.layers.15.self_attn.v_proj": 56,
79
+ "model.layers.16.mlp.down_proj": 63,
80
+ "model.layers.16.mlp.gate_proj": 63,
81
+ "model.layers.16.mlp.up_proj": 64,
82
+ "model.layers.16.self_attn.k_proj": 73,
83
+ "model.layers.16.self_attn.o_proj": 63,
84
+ "model.layers.16.self_attn.q_proj": 71,
85
+ "model.layers.16.self_attn.v_proj": 55,
86
+ "model.layers.17.mlp.down_proj": 64,
87
+ "model.layers.17.mlp.gate_proj": 63,
88
+ "model.layers.17.mlp.up_proj": 64,
89
+ "model.layers.17.self_attn.k_proj": 79,
90
+ "model.layers.17.self_attn.o_proj": 63,
91
+ "model.layers.17.self_attn.q_proj": 79,
92
+ "model.layers.17.self_attn.v_proj": 53,
93
+ "model.layers.18.mlp.down_proj": 64,
94
+ "model.layers.18.mlp.gate_proj": 63,
95
+ "model.layers.18.mlp.up_proj": 64,
96
+ "model.layers.18.self_attn.k_proj": 71,
97
+ "model.layers.18.self_attn.o_proj": 63,
98
+ "model.layers.18.self_attn.q_proj": 72,
99
+ "model.layers.18.self_attn.v_proj": 58,
100
+ "model.layers.19.mlp.down_proj": 64,
101
+ "model.layers.19.mlp.gate_proj": 63,
102
+ "model.layers.19.mlp.up_proj": 64,
103
+ "model.layers.19.self_attn.k_proj": 73,
104
+ "model.layers.19.self_attn.o_proj": 68,
105
+ "model.layers.19.self_attn.q_proj": 74,
106
+ "model.layers.19.self_attn.v_proj": 61,
107
+ "model.layers.2.mlp.down_proj": 61,
108
+ "model.layers.2.mlp.gate_proj": 51,
109
+ "model.layers.2.mlp.up_proj": 54,
110
+ "model.layers.2.self_attn.k_proj": 81,
111
+ "model.layers.2.self_attn.o_proj": 50,
112
+ "model.layers.2.self_attn.q_proj": 58,
113
+ "model.layers.2.self_attn.v_proj": 39,
114
+ "model.layers.20.mlp.down_proj": 64,
115
+ "model.layers.20.mlp.gate_proj": 63,
116
+ "model.layers.20.mlp.up_proj": 64,
117
+ "model.layers.20.self_attn.k_proj": 74,
118
+ "model.layers.20.self_attn.o_proj": 63,
119
+ "model.layers.20.self_attn.q_proj": 73,
120
+ "model.layers.20.self_attn.v_proj": 57,
121
+ "model.layers.21.mlp.down_proj": 64,
122
+ "model.layers.21.mlp.gate_proj": 63,
123
+ "model.layers.21.mlp.up_proj": 65,
124
+ "model.layers.21.self_attn.k_proj": 66,
125
+ "model.layers.21.self_attn.o_proj": 63,
126
+ "model.layers.21.self_attn.q_proj": 65,
127
+ "model.layers.21.self_attn.v_proj": 56,
128
+ "model.layers.22.mlp.down_proj": 64,
129
+ "model.layers.22.mlp.gate_proj": 64,
130
+ "model.layers.22.mlp.up_proj": 65,
131
+ "model.layers.22.self_attn.k_proj": 69,
132
+ "model.layers.22.self_attn.o_proj": 67,
133
+ "model.layers.22.self_attn.q_proj": 64,
134
+ "model.layers.22.self_attn.v_proj": 62,
135
+ "model.layers.23.mlp.down_proj": 63,
136
+ "model.layers.23.mlp.gate_proj": 64,
137
+ "model.layers.23.mlp.up_proj": 65,
138
+ "model.layers.23.self_attn.k_proj": 74,
139
+ "model.layers.23.self_attn.o_proj": 65,
140
+ "model.layers.23.self_attn.q_proj": 71,
141
+ "model.layers.23.self_attn.v_proj": 61,
142
+ "model.layers.24.mlp.down_proj": 63,
143
+ "model.layers.24.mlp.gate_proj": 63,
144
+ "model.layers.24.mlp.up_proj": 65,
145
+ "model.layers.24.self_attn.k_proj": 75,
146
+ "model.layers.24.self_attn.o_proj": 60,
147
+ "model.layers.24.self_attn.q_proj": 70,
148
+ "model.layers.24.self_attn.v_proj": 56,
149
+ "model.layers.25.mlp.down_proj": 64,
150
+ "model.layers.25.mlp.gate_proj": 62,
151
+ "model.layers.25.mlp.up_proj": 64,
152
+ "model.layers.25.self_attn.k_proj": 78,
153
+ "model.layers.25.self_attn.o_proj": 63,
154
+ "model.layers.25.self_attn.q_proj": 73,
155
+ "model.layers.25.self_attn.v_proj": 54,
156
+ "model.layers.26.mlp.down_proj": 64,
157
+ "model.layers.26.mlp.gate_proj": 63,
158
+ "model.layers.26.mlp.up_proj": 65,
159
+ "model.layers.26.self_attn.k_proj": 76,
160
+ "model.layers.26.self_attn.o_proj": 66,
161
+ "model.layers.26.self_attn.q_proj": 75,
162
+ "model.layers.26.self_attn.v_proj": 58,
163
+ "model.layers.27.mlp.down_proj": 65,
164
+ "model.layers.27.mlp.gate_proj": 63,
165
+ "model.layers.27.mlp.up_proj": 65,
166
+ "model.layers.27.self_attn.k_proj": 68,
167
+ "model.layers.27.self_attn.o_proj": 64,
168
+ "model.layers.27.self_attn.q_proj": 62,
169
+ "model.layers.27.self_attn.v_proj": 62,
170
+ "model.layers.28.mlp.down_proj": 65,
171
+ "model.layers.28.mlp.gate_proj": 63,
172
+ "model.layers.28.mlp.up_proj": 65,
173
+ "model.layers.28.self_attn.k_proj": 71,
174
+ "model.layers.28.self_attn.o_proj": 67,
175
+ "model.layers.28.self_attn.q_proj": 68,
176
+ "model.layers.28.self_attn.v_proj": 64,
177
+ "model.layers.29.mlp.down_proj": 65,
178
+ "model.layers.29.mlp.gate_proj": 63,
179
+ "model.layers.29.mlp.up_proj": 65,
180
+ "model.layers.29.self_attn.k_proj": 68,
181
+ "model.layers.29.self_attn.o_proj": 67,
182
+ "model.layers.29.self_attn.q_proj": 66,
183
+ "model.layers.29.self_attn.v_proj": 63,
184
+ "model.layers.3.mlp.down_proj": 62,
185
+ "model.layers.3.mlp.gate_proj": 56,
186
+ "model.layers.3.mlp.up_proj": 58,
187
+ "model.layers.3.self_attn.k_proj": 83,
188
+ "model.layers.3.self_attn.o_proj": 59,
189
+ "model.layers.3.self_attn.q_proj": 70,
190
+ "model.layers.3.self_attn.v_proj": 46,
191
+ "model.layers.30.mlp.down_proj": 64,
192
+ "model.layers.30.mlp.gate_proj": 64,
193
+ "model.layers.30.mlp.up_proj": 65,
194
+ "model.layers.30.self_attn.k_proj": 54,
195
+ "model.layers.30.self_attn.o_proj": 67,
196
+ "model.layers.30.self_attn.q_proj": 52,
197
+ "model.layers.30.self_attn.v_proj": 65,
198
+ "model.layers.31.mlp.down_proj": 64,
199
+ "model.layers.31.mlp.gate_proj": 64,
200
+ "model.layers.31.mlp.up_proj": 65,
201
+ "model.layers.31.self_attn.k_proj": 68,
202
+ "model.layers.31.self_attn.o_proj": 65,
203
+ "model.layers.31.self_attn.q_proj": 67,
204
+ "model.layers.31.self_attn.v_proj": 62,
205
+ "model.layers.32.mlp.down_proj": 64,
206
+ "model.layers.32.mlp.gate_proj": 64,
207
+ "model.layers.32.mlp.up_proj": 65,
208
+ "model.layers.32.self_attn.k_proj": 72,
209
+ "model.layers.32.self_attn.o_proj": 63,
210
+ "model.layers.32.self_attn.q_proj": 71,
211
+ "model.layers.32.self_attn.v_proj": 62,
212
+ "model.layers.33.mlp.down_proj": 64,
213
+ "model.layers.33.mlp.gate_proj": 63,
214
+ "model.layers.33.mlp.up_proj": 65,
215
+ "model.layers.33.self_attn.k_proj": 75,
216
+ "model.layers.33.self_attn.o_proj": 63,
217
+ "model.layers.33.self_attn.q_proj": 77,
218
+ "model.layers.33.self_attn.v_proj": 60,
219
+ "model.layers.34.mlp.down_proj": 64,
220
+ "model.layers.34.mlp.gate_proj": 63,
221
+ "model.layers.34.mlp.up_proj": 65,
222
+ "model.layers.34.self_attn.k_proj": 68,
223
+ "model.layers.34.self_attn.o_proj": 62,
224
+ "model.layers.34.self_attn.q_proj": 64,
225
+ "model.layers.34.self_attn.v_proj": 61,
226
+ "model.layers.35.mlp.down_proj": 64,
227
+ "model.layers.35.mlp.gate_proj": 63,
228
+ "model.layers.35.mlp.up_proj": 65,
229
+ "model.layers.35.self_attn.k_proj": 66,
230
+ "model.layers.35.self_attn.o_proj": 61,
231
+ "model.layers.35.self_attn.q_proj": 67,
232
+ "model.layers.35.self_attn.v_proj": 60,
233
+ "model.layers.36.mlp.down_proj": 65,
234
+ "model.layers.36.mlp.gate_proj": 63,
235
+ "model.layers.36.mlp.up_proj": 65,
236
+ "model.layers.36.self_attn.k_proj": 69,
237
+ "model.layers.36.self_attn.o_proj": 60,
238
+ "model.layers.36.self_attn.q_proj": 69,
239
+ "model.layers.36.self_attn.v_proj": 61,
240
+ "model.layers.37.mlp.down_proj": 65,
241
+ "model.layers.37.mlp.gate_proj": 63,
242
+ "model.layers.37.mlp.up_proj": 65,
243
+ "model.layers.37.self_attn.k_proj": 73,
244
+ "model.layers.37.self_attn.o_proj": 62,
245
+ "model.layers.37.self_attn.q_proj": 75,
246
+ "model.layers.37.self_attn.v_proj": 61,
247
+ "model.layers.38.mlp.down_proj": 65,
248
+ "model.layers.38.mlp.gate_proj": 63,
249
+ "model.layers.38.mlp.up_proj": 65,
250
+ "model.layers.38.self_attn.k_proj": 72,
251
+ "model.layers.38.self_attn.o_proj": 60,
252
+ "model.layers.38.self_attn.q_proj": 76,
253
+ "model.layers.38.self_attn.v_proj": 59,
254
+ "model.layers.39.mlp.down_proj": 65,
255
+ "model.layers.39.mlp.gate_proj": 63,
256
+ "model.layers.39.mlp.up_proj": 65,
257
+ "model.layers.39.self_attn.k_proj": 77,
258
+ "model.layers.39.self_attn.o_proj": 61,
259
+ "model.layers.39.self_attn.q_proj": 80,
260
+ "model.layers.39.self_attn.v_proj": 61,
261
+ "model.layers.4.mlp.down_proj": 62,
262
+ "model.layers.4.mlp.gate_proj": 59,
263
+ "model.layers.4.mlp.up_proj": 60,
264
+ "model.layers.4.self_attn.k_proj": 74,
265
+ "model.layers.4.self_attn.o_proj": 55,
266
+ "model.layers.4.self_attn.q_proj": 71,
267
+ "model.layers.4.self_attn.v_proj": 44,
268
+ "model.layers.40.mlp.down_proj": 65,
269
+ "model.layers.40.mlp.gate_proj": 63,
270
+ "model.layers.40.mlp.up_proj": 65,
271
+ "model.layers.40.self_attn.k_proj": 73,
272
+ "model.layers.40.self_attn.o_proj": 68,
273
+ "model.layers.40.self_attn.q_proj": 79,
274
+ "model.layers.40.self_attn.v_proj": 65,
275
+ "model.layers.41.mlp.down_proj": 66,
276
+ "model.layers.41.mlp.gate_proj": 63,
277
+ "model.layers.41.mlp.up_proj": 64,
278
+ "model.layers.41.self_attn.k_proj": 70,
279
+ "model.layers.41.self_attn.o_proj": 65,
280
+ "model.layers.41.self_attn.q_proj": 74,
281
+ "model.layers.41.self_attn.v_proj": 63,
282
+ "model.layers.42.mlp.down_proj": 65,
283
+ "model.layers.42.mlp.gate_proj": 64,
284
+ "model.layers.42.mlp.up_proj": 64,
285
+ "model.layers.42.self_attn.k_proj": 68,
286
+ "model.layers.42.self_attn.o_proj": 65,
287
+ "model.layers.42.self_attn.q_proj": 74,
288
+ "model.layers.42.self_attn.v_proj": 65,
289
+ "model.layers.43.mlp.down_proj": 66,
290
+ "model.layers.43.mlp.gate_proj": 64,
291
+ "model.layers.43.mlp.up_proj": 64,
292
+ "model.layers.43.self_attn.k_proj": 69,
293
+ "model.layers.43.self_attn.o_proj": 65,
294
+ "model.layers.43.self_attn.q_proj": 70,
295
+ "model.layers.43.self_attn.v_proj": 64,
296
+ "model.layers.44.mlp.down_proj": 65,
297
+ "model.layers.44.mlp.gate_proj": 65,
298
+ "model.layers.44.mlp.up_proj": 63,
299
+ "model.layers.44.self_attn.k_proj": 67,
300
+ "model.layers.44.self_attn.o_proj": 72,
301
+ "model.layers.44.self_attn.q_proj": 76,
302
+ "model.layers.44.self_attn.v_proj": 67,
303
+ "model.layers.45.mlp.down_proj": 65,
304
+ "model.layers.45.mlp.gate_proj": 65,
305
+ "model.layers.45.mlp.up_proj": 63,
306
+ "model.layers.45.self_attn.k_proj": 64,
307
+ "model.layers.45.self_attn.o_proj": 70,
308
+ "model.layers.45.self_attn.q_proj": 66,
309
+ "model.layers.45.self_attn.v_proj": 70,
310
+ "model.layers.46.mlp.down_proj": 65,
311
+ "model.layers.46.mlp.gate_proj": 66,
312
+ "model.layers.46.mlp.up_proj": 63,
313
+ "model.layers.46.self_attn.k_proj": 61,
314
+ "model.layers.46.self_attn.o_proj": 71,
315
+ "model.layers.46.self_attn.q_proj": 63,
316
+ "model.layers.46.self_attn.v_proj": 69,
317
+ "model.layers.47.mlp.down_proj": 65,
318
+ "model.layers.47.mlp.gate_proj": 66,
319
+ "model.layers.47.mlp.up_proj": 63,
320
+ "model.layers.47.self_attn.k_proj": 71,
321
+ "model.layers.47.self_attn.o_proj": 68,
322
+ "model.layers.47.self_attn.q_proj": 73,
323
+ "model.layers.47.self_attn.v_proj": 67,
324
+ "model.layers.48.mlp.down_proj": 65,
325
+ "model.layers.48.mlp.gate_proj": 67,
326
+ "model.layers.48.mlp.up_proj": 63,
327
+ "model.layers.48.self_attn.k_proj": 54,
328
+ "model.layers.48.self_attn.o_proj": 71,
329
+ "model.layers.48.self_attn.q_proj": 58,
330
+ "model.layers.48.self_attn.v_proj": 71,
331
+ "model.layers.49.mlp.down_proj": 65,
332
+ "model.layers.49.mlp.gate_proj": 67,
333
+ "model.layers.49.mlp.up_proj": 64,
334
+ "model.layers.49.self_attn.k_proj": 53,
335
+ "model.layers.49.self_attn.o_proj": 67,
336
+ "model.layers.49.self_attn.q_proj": 50,
337
+ "model.layers.49.self_attn.v_proj": 70,
338
+ "model.layers.5.mlp.down_proj": 62,
339
+ "model.layers.5.mlp.gate_proj": 60,
340
+ "model.layers.5.mlp.up_proj": 61,
341
+ "model.layers.5.self_attn.k_proj": 80,
342
+ "model.layers.5.self_attn.o_proj": 57,
343
+ "model.layers.5.self_attn.q_proj": 77,
344
+ "model.layers.5.self_attn.v_proj": 46,
345
+ "model.layers.50.mlp.down_proj": 65,
346
+ "model.layers.50.mlp.gate_proj": 67,
347
+ "model.layers.50.mlp.up_proj": 64,
348
+ "model.layers.50.self_attn.k_proj": 50,
349
+ "model.layers.50.self_attn.o_proj": 70,
350
+ "model.layers.50.self_attn.q_proj": 53,
351
+ "model.layers.50.self_attn.v_proj": 71,
352
+ "model.layers.51.mlp.down_proj": 65,
353
+ "model.layers.51.mlp.gate_proj": 67,
354
+ "model.layers.51.mlp.up_proj": 64,
355
+ "model.layers.51.self_attn.k_proj": 56,
356
+ "model.layers.51.self_attn.o_proj": 68,
357
+ "model.layers.51.self_attn.q_proj": 59,
358
+ "model.layers.51.self_attn.v_proj": 72,
359
+ "model.layers.52.mlp.down_proj": 65,
360
+ "model.layers.52.mlp.gate_proj": 67,
361
+ "model.layers.52.mlp.up_proj": 64,
362
+ "model.layers.52.self_attn.k_proj": 57,
363
+ "model.layers.52.self_attn.o_proj": 70,
364
+ "model.layers.52.self_attn.q_proj": 66,
365
+ "model.layers.52.self_attn.v_proj": 69,
366
+ "model.layers.53.mlp.down_proj": 65,
367
+ "model.layers.53.mlp.gate_proj": 67,
368
+ "model.layers.53.mlp.up_proj": 64,
369
+ "model.layers.53.self_attn.k_proj": 46,
370
+ "model.layers.53.self_attn.o_proj": 70,
371
+ "model.layers.53.self_attn.q_proj": 49,
372
+ "model.layers.53.self_attn.v_proj": 74,
373
+ "model.layers.54.mlp.down_proj": 65,
374
+ "model.layers.54.mlp.gate_proj": 68,
375
+ "model.layers.54.mlp.up_proj": 64,
376
+ "model.layers.54.self_attn.k_proj": 46,
377
+ "model.layers.54.self_attn.o_proj": 69,
378
+ "model.layers.54.self_attn.q_proj": 50,
379
+ "model.layers.54.self_attn.v_proj": 73,
380
+ "model.layers.55.mlp.down_proj": 65,
381
+ "model.layers.55.mlp.gate_proj": 68,
382
+ "model.layers.55.mlp.up_proj": 65,
383
+ "model.layers.55.self_attn.k_proj": 56,
384
+ "model.layers.55.self_attn.o_proj": 72,
385
+ "model.layers.55.self_attn.q_proj": 59,
386
+ "model.layers.55.self_attn.v_proj": 77,
387
+ "model.layers.56.mlp.down_proj": 65,
388
+ "model.layers.56.mlp.gate_proj": 68,
389
+ "model.layers.56.mlp.up_proj": 65,
390
+ "model.layers.56.self_attn.k_proj": 55,
391
+ "model.layers.56.self_attn.o_proj": 68,
392
+ "model.layers.56.self_attn.q_proj": 56,
393
+ "model.layers.56.self_attn.v_proj": 73,
394
+ "model.layers.57.mlp.down_proj": 65,
395
+ "model.layers.57.mlp.gate_proj": 68,
396
+ "model.layers.57.mlp.up_proj": 65,
397
+ "model.layers.57.self_attn.k_proj": 48,
398
+ "model.layers.57.self_attn.o_proj": 72,
399
+ "model.layers.57.self_attn.q_proj": 52,
400
+ "model.layers.57.self_attn.v_proj": 77,
401
+ "model.layers.58.mlp.down_proj": 65,
402
+ "model.layers.58.mlp.gate_proj": 68,
403
+ "model.layers.58.mlp.up_proj": 65,
404
+ "model.layers.58.self_attn.k_proj": 44,
405
+ "model.layers.58.self_attn.o_proj": 69,
406
+ "model.layers.58.self_attn.q_proj": 46,
407
+ "model.layers.58.self_attn.v_proj": 73,
408
+ "model.layers.59.mlp.down_proj": 65,
409
+ "model.layers.59.mlp.gate_proj": 68,
410
+ "model.layers.59.mlp.up_proj": 65,
411
+ "model.layers.59.self_attn.k_proj": 45,
412
+ "model.layers.59.self_attn.o_proj": 68,
413
+ "model.layers.59.self_attn.q_proj": 47,
414
+ "model.layers.59.self_attn.v_proj": 74,
415
+ "model.layers.6.mlp.down_proj": 62,
416
+ "model.layers.6.mlp.gate_proj": 60,
417
+ "model.layers.6.mlp.up_proj": 61,
418
+ "model.layers.6.self_attn.k_proj": 80,
419
+ "model.layers.6.self_attn.o_proj": 61,
420
+ "model.layers.6.self_attn.q_proj": 80,
421
+ "model.layers.6.self_attn.v_proj": 47,
422
+ "model.layers.60.mlp.down_proj": 65,
423
+ "model.layers.60.mlp.gate_proj": 68,
424
+ "model.layers.60.mlp.up_proj": 66,
425
+ "model.layers.60.self_attn.k_proj": 27,
426
+ "model.layers.60.self_attn.o_proj": 67,
427
+ "model.layers.60.self_attn.q_proj": 28,
428
+ "model.layers.60.self_attn.v_proj": 74,
429
+ "model.layers.61.mlp.down_proj": 65,
430
+ "model.layers.61.mlp.gate_proj": 68,
431
+ "model.layers.61.mlp.up_proj": 66,
432
+ "model.layers.61.self_attn.k_proj": 41,
433
+ "model.layers.61.self_attn.o_proj": 70,
434
+ "model.layers.61.self_attn.q_proj": 43,
435
+ "model.layers.61.self_attn.v_proj": 76,
436
+ "model.layers.62.mlp.down_proj": 65,
437
+ "model.layers.62.mlp.gate_proj": 68,
438
+ "model.layers.62.mlp.up_proj": 66,
439
+ "model.layers.62.self_attn.k_proj": 42,
440
+ "model.layers.62.self_attn.o_proj": 68,
441
+ "model.layers.62.self_attn.q_proj": 42,
442
+ "model.layers.62.self_attn.v_proj": 75,
443
+ "model.layers.63.mlp.down_proj": 65,
444
+ "model.layers.63.mlp.gate_proj": 68,
445
+ "model.layers.63.mlp.up_proj": 66,
446
+ "model.layers.63.self_attn.k_proj": 40,
447
+ "model.layers.63.self_attn.o_proj": 65,
448
+ "model.layers.63.self_attn.q_proj": 43,
449
+ "model.layers.63.self_attn.v_proj": 72,
450
+ "model.layers.64.mlp.down_proj": 65,
451
+ "model.layers.64.mlp.gate_proj": 68,
452
+ "model.layers.64.mlp.up_proj": 66,
453
+ "model.layers.64.self_attn.k_proj": 50,
454
+ "model.layers.64.self_attn.o_proj": 71,
455
+ "model.layers.64.self_attn.q_proj": 51,
456
+ "model.layers.64.self_attn.v_proj": 77,
457
+ "model.layers.65.mlp.down_proj": 65,
458
+ "model.layers.65.mlp.gate_proj": 68,
459
+ "model.layers.65.mlp.up_proj": 66,
460
+ "model.layers.65.self_attn.k_proj": 33,
461
+ "model.layers.65.self_attn.o_proj": 64,
462
+ "model.layers.65.self_attn.q_proj": 35,
463
+ "model.layers.65.self_attn.v_proj": 71,
464
+ "model.layers.66.mlp.down_proj": 65,
465
+ "model.layers.66.mlp.gate_proj": 68,
466
+ "model.layers.66.mlp.up_proj": 67,
467
+ "model.layers.66.self_attn.k_proj": 41,
468
+ "model.layers.66.self_attn.o_proj": 68,
469
+ "model.layers.66.self_attn.q_proj": 43,
470
+ "model.layers.66.self_attn.v_proj": 74,
471
+ "model.layers.67.mlp.down_proj": 65,
472
+ "model.layers.67.mlp.gate_proj": 68,
473
+ "model.layers.67.mlp.up_proj": 67,
474
+ "model.layers.67.self_attn.k_proj": 35,
475
+ "model.layers.67.self_attn.o_proj": 57,
476
+ "model.layers.67.self_attn.q_proj": 34,
477
+ "model.layers.67.self_attn.v_proj": 67,
478
+ "model.layers.68.mlp.down_proj": 65,
479
+ "model.layers.68.mlp.gate_proj": 68,
480
+ "model.layers.68.mlp.up_proj": 67,
481
+ "model.layers.68.self_attn.k_proj": 54,
482
+ "model.layers.68.self_attn.o_proj": 71,
483
+ "model.layers.68.self_attn.q_proj": 56,
484
+ "model.layers.68.self_attn.v_proj": 80,
485
+ "model.layers.69.mlp.down_proj": 65,
486
+ "model.layers.69.mlp.gate_proj": 68,
487
+ "model.layers.69.mlp.up_proj": 67,
488
+ "model.layers.69.self_attn.k_proj": 65,
489
+ "model.layers.69.self_attn.o_proj": 72,
490
+ "model.layers.69.self_attn.q_proj": 65,
491
+ "model.layers.69.self_attn.v_proj": 84,
492
+ "model.layers.7.mlp.down_proj": 63,
493
+ "model.layers.7.mlp.gate_proj": 61,
494
+ "model.layers.7.mlp.up_proj": 62,
495
+ "model.layers.7.self_attn.k_proj": 81,
496
+ "model.layers.7.self_attn.o_proj": 59,
497
+ "model.layers.7.self_attn.q_proj": 82,
498
+ "model.layers.7.self_attn.v_proj": 45,
499
+ "model.layers.70.mlp.down_proj": 65,
500
+ "model.layers.70.mlp.gate_proj": 68,
501
+ "model.layers.70.mlp.up_proj": 68,
502
+ "model.layers.70.self_attn.k_proj": 58,
503
+ "model.layers.70.self_attn.o_proj": 66,
504
+ "model.layers.70.self_attn.q_proj": 60,
505
+ "model.layers.70.self_attn.v_proj": 77,
506
+ "model.layers.71.mlp.down_proj": 65,
507
+ "model.layers.71.mlp.gate_proj": 68,
508
+ "model.layers.71.mlp.up_proj": 68,
509
+ "model.layers.71.self_attn.k_proj": 65,
510
+ "model.layers.71.self_attn.o_proj": 68,
511
+ "model.layers.71.self_attn.q_proj": 65,
512
+ "model.layers.71.self_attn.v_proj": 77,
513
+ "model.layers.72.mlp.down_proj": 65,
514
+ "model.layers.72.mlp.gate_proj": 67,
515
+ "model.layers.72.mlp.up_proj": 68,
516
+ "model.layers.72.self_attn.k_proj": 70,
517
+ "model.layers.72.self_attn.o_proj": 69,
518
+ "model.layers.72.self_attn.q_proj": 72,
519
+ "model.layers.72.self_attn.v_proj": 77,
520
+ "model.layers.73.mlp.down_proj": 65,
521
+ "model.layers.73.mlp.gate_proj": 67,
522
+ "model.layers.73.mlp.up_proj": 68,
523
+ "model.layers.73.self_attn.k_proj": 67,
524
+ "model.layers.73.self_attn.o_proj": 69,
525
+ "model.layers.73.self_attn.q_proj": 68,
526
+ "model.layers.73.self_attn.v_proj": 82,
527
+ "model.layers.74.mlp.down_proj": 65,
528
+ "model.layers.74.mlp.gate_proj": 67,
529
+ "model.layers.74.mlp.up_proj": 68,
530
+ "model.layers.74.self_attn.k_proj": 68,
531
+ "model.layers.74.self_attn.o_proj": 65,
532
+ "model.layers.74.self_attn.q_proj": 70,
533
+ "model.layers.74.self_attn.v_proj": 77,
534
+ "model.layers.75.mlp.down_proj": 65,
535
+ "model.layers.75.mlp.gate_proj": 67,
536
+ "model.layers.75.mlp.up_proj": 68,
537
+ "model.layers.75.self_attn.k_proj": 64,
538
+ "model.layers.75.self_attn.o_proj": 65,
539
+ "model.layers.75.self_attn.q_proj": 68,
540
+ "model.layers.75.self_attn.v_proj": 82,
541
+ "model.layers.76.mlp.down_proj": 65,
542
+ "model.layers.76.mlp.gate_proj": 67,
543
+ "model.layers.76.mlp.up_proj": 68,
544
+ "model.layers.76.self_attn.k_proj": 67,
545
+ "model.layers.76.self_attn.o_proj": 71,
546
+ "model.layers.76.self_attn.q_proj": 75,
547
+ "model.layers.76.self_attn.v_proj": 89,
548
+ "model.layers.77.mlp.down_proj": 65,
549
+ "model.layers.77.mlp.gate_proj": 67,
550
+ "model.layers.77.mlp.up_proj": 68,
551
+ "model.layers.77.self_attn.k_proj": 64,
552
+ "model.layers.77.self_attn.o_proj": 65,
553
+ "model.layers.77.self_attn.q_proj": 69,
554
+ "model.layers.77.self_attn.v_proj": 84,
555
+ "model.layers.78.mlp.down_proj": 64,
556
+ "model.layers.78.mlp.gate_proj": 67,
557
+ "model.layers.78.mlp.up_proj": 68,
558
+ "model.layers.78.self_attn.k_proj": 64,
559
+ "model.layers.78.self_attn.o_proj": 60,
560
+ "model.layers.78.self_attn.q_proj": 66,
561
+ "model.layers.78.self_attn.v_proj": 77,
562
+ "model.layers.79.mlp.down_proj": 63,
563
+ "model.layers.79.mlp.gate_proj": 68,
564
+ "model.layers.79.mlp.up_proj": 67,
565
+ "model.layers.79.self_attn.k_proj": 65,
566
+ "model.layers.79.self_attn.o_proj": 48,
567
+ "model.layers.79.self_attn.q_proj": 61,
568
+ "model.layers.79.self_attn.v_proj": 62,
569
+ "model.layers.8.mlp.down_proj": 64,
570
+ "model.layers.8.mlp.gate_proj": 62,
571
+ "model.layers.8.mlp.up_proj": 62,
572
+ "model.layers.8.self_attn.k_proj": 82,
573
+ "model.layers.8.self_attn.o_proj": 61,
574
+ "model.layers.8.self_attn.q_proj": 82,
575
+ "model.layers.8.self_attn.v_proj": 46,
576
+ "model.layers.9.mlp.down_proj": 64,
577
+ "model.layers.9.mlp.gate_proj": 63,
578
+ "model.layers.9.mlp.up_proj": 63,
579
+ "model.layers.9.self_attn.k_proj": 77,
580
+ "model.layers.9.self_attn.o_proj": 62,
581
+ "model.layers.9.self_attn.q_proj": 76,
582
+ "model.layers.9.self_attn.v_proj": 51
583
+ },
584
+ "rank_file": "/workspace/asr/peft/qlora/llama2_70bhf_lazylora_r64_2.json",
585
+ "target_modules": [
586
+ "o_proj",
587
+ "up_proj",
588
+ "k_proj",
589
+ "down_proj",
590
+ "q_proj",
591
+ "v_proj",
592
+ "gate_proj"
593
+ ],
594
+ "task_type": "CAUSAL_LM",
595
+ "token_dim": 8192
596
+ }
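
The per-module ranks recorded above can be inspected directly from the config. A small illustrative snippet, assuming `adapter_config.json` sits in the current directory:

```python
import json
from statistics import mean

with open("adapter_config.json") as f:
    cfg = json.load(f)

ranks = cfg["r_by_module_dict"]
print(len(ranks), "adapted modules")  # 7 target projections x 80 layers = 560
print("min / mean / max rank:",
      min(ranks.values()), round(mean(ranks.values()), 1), max(ranks.values()))
```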
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:857b3b46b329f38aadcc7ddf874025e17ec34f657ce778cd4a8e4a589455bbce
size 1657058821
usage.py ADDED
@@ -0,0 +1,51 @@
import sys
sys.path.insert(1, '/workspace/asr/peft/src')
# TODO set this path to the lazy-lora source code path, or you can install it from source code:
# TODO, please install lazylora for usage:
# git clone [email protected]:Xianchao-Wu/peft.git
# cd peft
# python setup.py install

from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
from peft import PeftModel, PeftConfig
import os
import torch

#import ipdb; ipdb.set_trace()
cache_dir = "/workspace/asr/peft/qlora"
# TODO set this cache_dir to the path where you stored (or want to store) the llama2-70b-hf model

lazylora_dir = os.getcwd()  # the path that contains 'adapter_config.json' and 'adapter_model.bin'

config = PeftConfig.from_pretrained(lazylora_dir)

tokenizer = AutoTokenizer.from_pretrained(
    config.base_model_name_or_path,
    cache_dir=cache_dir,
    use_auth_token=True
)

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type='nf4',
    bnb_4bit_compute_dtype=torch.bfloat16
)

model = AutoModelForCausalLM.from_pretrained(
    config.base_model_name_or_path,
    quantization_config=bnb_config,
    device_map="auto",
    cache_dir=cache_dir,
    use_auth_token=True
)
#model.print_trainable_parameters()
print(sum(p.numel() for p in model.parameters()))
# 34,751,127,552 -> about half of 70B, since 4-bit weights are stored packed

model = PeftModel.from_pretrained(model, lazylora_dir)
print('after adding lazy lora parameters:')
model.print_trainable_parameters()
# trainable params: 0 || all params: 35,579,442,176 || trainable%: 0.0