BOT365 committed
Commit 9a29588
1 Parent(s): 331ba12

End of training

README.md CHANGED
@@ -1,6 +1,6 @@
 ---
 license: apache-2.0
-base_model: PY007/TinyLlama-1.1B-Chat-v0.3
+base_model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
 tags:
 - trl
 - sft
@@ -15,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->

 # tinyllama-colorist-lora

-This model is a fine-tuned version of [PY007/TinyLlama-1.1B-Chat-v0.3](https://huggingface.co/PY007/TinyLlama-1.1B-Chat-v0.3) on the None dataset.
+This model is a fine-tuned version of [TinyLlama/TinyLlama-1.1B-Chat-v1.0](https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0) on the None dataset.

 ## Model description

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "PY007/TinyLlama-1.1B-Chat-v0.3",
+  "base_model_name_or_path": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
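The adapter config now points the LoRA weights at TinyLlama/TinyLlama-1.1B-Chat-v1.0 and targets the q_proj and v_proj attention projections. Below is a minimal sketch of attaching the adapter with transformers and peft; the adapter repo id "BOT365/tinyllama-colorist-lora" is assumed from the model name and may differ.

```python
# Sketch: load the new base model, then attach the LoRA adapter from this repo.
# Assumes `transformers` and `peft` are installed; the adapter repo id below is
# an assumption and may not be the actual Hub id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"   # base_model_name_or_path from adapter_config.json
adapter_id = "BOT365/tinyllama-colorist-lora"    # hypothetical repo id for this adapter

tokenizer = AutoTokenizer.from_pretrained(base_id)
base_model = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype=torch.float16)

# PeftModel injects the low-rank updates into the q_proj/v_proj modules
# listed in target_modules; the base weights stay frozen.
model = PeftModel.from_pretrained(base_model, adapter_id)
model.eval()
```

If a standalone checkpoint is preferred, `model.merge_and_unload()` can fold the LoRA deltas back into the base weights before saving.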
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dcee7008c1c2880b7fc29ba1484ef1986d790c8dbb82b8753deade814025e3d8
+oid sha256:405162d901d5c099516cfce852f9f1e0af684afc89c81cfe38ef81b2cf43f223
 size 4517152
runs/Jan11_11-06-22_20e0c788e240/events.out.tfevents.1704971189.20e0c788e240.272.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25b6a29f6190cd75c59639d0d2f0937317a33df0d99e517fb426e84f98e67573
+size 8126
tokenizer.json CHANGED
@@ -34,33 +34,6 @@
       "rstrip": false,
       "normalized": false,
       "special": true
-    },
-    {
-      "id": 32000,
-      "content": "[PAD]",
-      "single_word": false,
-      "lstrip": true,
-      "rstrip": true,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32001,
-      "content": "<|im_start|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32002,
-      "content": "<|im_end|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
     }
   ],
   "normalizer": {
tokenizer_config.json CHANGED
@@ -23,42 +23,18 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "32000": {
-      "content": "[PAD]",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|im_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32002": {
-      "content": "<|im_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
-  "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": false,
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 2048,
   "pad_token": "</s>",
   "padding_side": "right",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt": true
+  "use_default_system_prompt": false
 }
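The new chat_template uses the Zephyr-style <|user|>/<|system|>/<|assistant|> markers in place of the removed ChatML tokens, and model_max_length is now the base model's 2048-token context. A minimal sketch of rendering a prompt with it, assuming a transformers version that supports apply_chat_template and the same hypothetical repo id:

```python
# Sketch: render a prompt with the chat template added in this commit.
# The repo id "BOT365/tinyllama-colorist-lora" is an assumption.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("BOT365/tinyllama-colorist-lora")

messages = [
    {"role": "user", "content": "Give me the hex code for a warm sunset orange."},
]

# add_generation_prompt=True appends the trailing '<|assistant|>' cue defined
# by the template, so generation starts with the assistant turn. The result is
# a '<|user|>' block ending in eos_token, followed by the assistant cue.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```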
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1d4e7b220ef94b7c8d6d5fc2116eeeb8ef04dc132b6751d4a84cb723f1763be6
+oid sha256:2a6f2298887ee9449959fc9bc473756de26cc6b4f48f0207d37087a068a09ebc
 size 4600