njwright92 committed
Commit
98ea8e6
1 Parent(s): 47da5c7

Update config.json

Files changed (1)
  1. config.json +23 -78
config.json CHANGED
@@ -1,80 +1,25 @@
  {
- "vocab_size": 32000,
- "max_position_embeddings": 32768,
- "hidden_size": 4096,
- "intermediate_size": 14336,
- "num_hidden_layers": 32,
- "num_attention_heads": 32,
- "sliding_window": null,
- "num_key_value_heads": 8,
- "hidden_act": "silu",
- "initializer_range": 0.02,
- "rms_norm_eps": 1e-05,
- "use_cache": true,
- "rope_theta": 1000000.0,
- "attention_dropout": 0.0,
- "return_dict": true,
- "output_hidden_states": false,
- "output_attentions": false,
- "torchscript": false,
- "torch_dtype": "bfloat16",
- "use_bfloat16": false,
- "tf_legacy_loss": false,
- "pruned_heads": {},
- "tie_word_embeddings": false,
- "chunk_size_feed_forward": 0,
- "is_encoder_decoder": false,
- "is_decoder": false,
- "cross_attention_hidden_size": null,
- "add_cross_attention": false,
- "tie_encoder_decoder": false,
- "max_length": 20,
- "min_length": 0,
- "do_sample": false,
- "early_stopping": false,
- "num_beams": 1,
- "num_beam_groups": 1,
- "diversity_penalty": 0.0,
- "temperature": 1.0,
- "top_k": 50,
- "top_p": 1.0,
- "typical_p": 1.0,
- "repetition_penalty": 1.0,
- "length_penalty": 1.0,
- "no_repeat_ngram_size": 0,
- "encoder_no_repeat_ngram_size": 0,
- "bad_words_ids": null,
- "num_return_sequences": 1,
- "output_scores": false,
- "return_dict_in_generate": false,
- "forced_bos_token_id": null,
- "forced_eos_token_id": null,
- "remove_invalid_values": false,
- "exponential_decay_length_penalty": null,
- "suppress_tokens": null,
- "begin_suppress_tokens": null,
- "architectures": [
- "MistralForCausalLM"
- ],
- "finetuning_task": null,
- "id2label": {
- "0": "LABEL_0",
- "1": "LABEL_1"
- },
- "label2id": {
- "LABEL_0": 0,
- "LABEL_1": 1
- },
- "tokenizer_class": null,
- "prefix": null,
- "bos_token_id": 1,
- "pad_token_id": null,
- "eos_token_id": 2,
- "sep_token_id": null,
- "decoder_start_token_id": null,
- "task_specific_params": null,
- "problem_type": null,
- "_name_or_path": "mistral-community/Mistral-7B-v0.2",
- "transformers_version": "4.37.2",
- "model_type": "mistral"
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 14336,
+ "max_position_embeddings": 32768,
+ "model_type": "mistral",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.39.1",
+ "use_cache": true,
+ "vocab_size": 32000
  }
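As a quick sanity check, the trimmed config.json can be parsed with the standard transformers config loader. This is a minimal sketch, not part of the commit; it assumes the file sits in the current directory and that an installed transformers release includes MistralConfig (4.34 or newer).

# Sketch: load the slimmed-down config.json and confirm the core Mistral
# architecture fields survived the cleanup. Assumes "config.json" is in the
# current working directory (hypothetical path, not from the commit).
from transformers import MistralConfig

cfg = MistralConfig.from_json_file("config.json")

assert cfg.model_type == "mistral"
assert cfg.vocab_size == 32000
assert cfg.hidden_size == 4096
assert cfg.num_attention_heads == 32
assert cfg.num_key_value_heads == 8
assert cfg.max_position_embeddings == 32768
assert cfg.rope_theta == 1000000.0
assert cfg.sliding_window is None

# Printing the config echoes it back as JSON, which should now contain only
# the keys kept by this commit (the removed generation defaults are gone).
print(cfg)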