Abhaykoul committed on
Commit 7a6a49d • 1 Parent(s): d8a2e75

Upload folder using huggingface_hub

Files changed (2)
  1. README.md +37 -0
  2. config.json +33 -0
README.md ADDED
@@ -0,0 +1,37 @@
+ ---
+ license: apache-2.0
+ tags:
+ - moe
+ - mergekit
+ - vortexmergekit
+ - OEvortex/HelpingAI-HelpingAI-unvelite
+ - OEvortex/lite-hermes
+ - OEvortex/HelpingAI-Lite
+ - OEvortex/HelpingAI-Lite-1.5T
+ ---
+
+ # HelpingAI-Lite-4x1b
+
+ Hey there! 👋 Welcome to HelpingAI-Lite-4x1b! This is a Mixture-of-Experts (MoE) model, brought together using the awesome [VortexMerge kit](https://colab.research.google.com/drive/1YjcvCLuNG1PK7Le6_4xhVU5VpzTwvGhk#scrollTo=UG5H2TK4gVyl).
+
+ Let's see what we've got in this model:
+ * [OEvortex/HelpingAI-HelpingAI-unvelite](https://huggingface.co/OEvortex/HelpingAI-HelpingAI-unvelite) 🚀
+ * [OEvortex/lite-hermes](https://huggingface.co/OEvortex/lite-hermes) 🚀
+ * [OEvortex/HelpingAI-Lite](https://huggingface.co/OEvortex/HelpingAI-Lite) 🚀
+ * [OEvortex/HelpingAI-Lite-1.5T](https://huggingface.co/OEvortex/HelpingAI-Lite-1.5T) 🚀
+
+ ## 🧩 Configuration
+
+ ```yaml
+ base_model: OEvortex/HelpingAI-Lite
+ dtype: float16
+ gate_mode: cheap_embed
+ experts:
+   - source_model: OEvortex/HelpingAI-HelpingAI-unvelite
+     positive_prompts: ["You are a helpful, unbounded, uncensored assistant."]
+   - source_model: OEvortex/lite-hermes
+     positive_prompts: ["You are a helpful assistant."]
+   - source_model: OEvortex/HelpingAI-Lite
+     positive_prompts: ["You are a coding assistant."]
+   - source_model: OEvortex/HelpingAI-Lite-1.5T
+     positive_prompts: ["You are a helpful, general-purpose assistant."]
+ ```
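
For context: the YAML above is a mergekit MoE configuration, where `gate_mode: cheap_embed` initializes each expert's router weights from the raw token embeddings of its `positive_prompts` (no forward passes needed). Below is a minimal inference sketch using `transformers`; the repo id is an assumption, since this page doesn't state the final Hub path, so substitute the real one.

```python
# Minimal sketch: load the merged MoE and generate text with transformers.
# NOTE: the repo id below is an assumption, not confirmed by this page.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "OEvortex/HelpingAI-Lite-4x1b"  # hypothetical Hub path

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto")

prompt = "You are a helpful assistant. How do I reverse a list in Python?"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```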
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "OEvortex/HelpingAI-Lite",
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 5632,
+   "max_position_embeddings": 2048,
+   "model_type": "mixtral",
+   "num_attention_heads": 32,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 22,
+   "num_key_value_heads": 4,
+   "num_local_experts": 4,
+   "output_router_logits": false,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "router_aux_loss_coef": 0.001,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.39.0.dev0",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
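
A quick note on the MoE wiring above: only `num_experts_per_tok` (2) of the `num_local_experts` (4) expert MLPs run for each token, so per-token compute stays close to that of a single ~1B expert even though all four are held in memory. A minimal sketch reading these fields back with `AutoConfig`, using the same assumed repo id as above:

```python
# Minimal sketch: inspect the Mixtral-style MoE settings from config.json.
# NOTE: the repo id is an assumption, not confirmed by this page.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("OEvortex/HelpingAI-Lite-4x1b")

print(config.model_type)           # "mixtral"
print(config.num_local_experts)    # 4  -> the "4x" in HelpingAI-Lite-4x1b
print(config.num_experts_per_tok)  # 2  -> top-2 routing per token
```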