id (string, length 7 to 117) | author (string, 6 classes) | sha (null) | created_at (unknown) | last_modified (null) | disabled (null) | downloads (int64, 0 to 18.6M) | downloads_all_time (null) | gated (bool, 1 class) | gguf (null) | inference (null) | likes (int64, 0 to 4.77k) | library_name (string, 36 classes) | tags (sequence, length 1 to 430) | pipeline_tag (string, 32 classes) | mask_token (null) | model_index (null) | trending_score (int64, 0 to 132) | architectures (sequence, length 1 to 5, nullable) | bos_token_id (int64, -1 to 256k, nullable) | eos_token_id (int64, -1 to 256k, nullable) | hidden_act (string, 15 classes) | hidden_size (int64, 1 to 20.5k, nullable) | initializer_range (float64, 0 to 1, nullable) | intermediate_size (int64, 1 to 98.3k, nullable) | max_position_embeddings (int64, 8 to 1.05M, nullable) | model_type (string, 530 classes) | num_attention_heads (int64, 1 to 5k, nullable) | num_hidden_layers (int64, -1 to 8.93k, nullable) | num_key_value_heads (int64, 1 to 160, nullable) | rms_norm_eps (float64, 0 to 7, nullable) | rope_theta (float64, 1k to 1,000B, nullable) | sliding_window (int64, 0 to 262k, nullable) | tie_word_embeddings (bool, 2 classes) | torch_dtype (string, 8 classes) | transformers_version (string, 207 classes) | use_cache (bool, 2 classes) | vocab_size (int64, -1 to 5.03M, nullable) | attention_bias (bool, 2 classes) | attention_dropout (float64, 0 to 0.5, nullable) | head_dim (int64, 2 to 256, nullable) | mlp_bias (bool, 2 classes) | pretraining_tp (int64, 0 to 8, nullable) | rope_scaling (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
yjwon/mpg27_mistral7bv3_sft_dpo_beta5e-2_epoch2 | null | null | "2024-11-12T04:29:11Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta5e-2_epoch3 | null | null | "2024-11-12T04:33:06Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta5e-2_epoch4 | null | null | "2024-11-12T04:36:22Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
yjwon/mpg27_mistral7bv3_sft_dpo_beta5e-2_epoch5 | null | null | "2024-11-12T04:39:25Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.0 | true | 32,768 | null | 0 | 128 | null | null | null |
ColdAsIce123/Phi3_Cold_v5 | null | null | "2024-11-12T04:40:56Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | false | 32,064 | false | 0 | null | null | null | null |
rxmsantos/distilgpt2-tweetsumm-finetune | null | null | "2024-11-12T04:47:53Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.2 | true | 50,257 | null | null | null | null | null | null |
ColdAsIce123/Phi3_Cold_v6 | null | null | "2024-11-12T04:50:29Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | false | 32,064 | false | 0 | null | null | null | null |
rawsh/mirrorqwen2.5-0.5b-ORPO-2 | null | null | "2024-11-12T04:53:52Z" | null | null | 4 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"trl",
"orpo",
"unsloth",
"arxiv:2403.07691",
"base_model:rawsh/mirrorqwen2.5-0.5b-ORPO-1",
"base_model:finetune:rawsh/mirrorqwen2.5-0.5b-ORPO-1",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | false | 151,936 | null | 0 | null | null | null | null |
AlSamCur123/Ministral-7b-instruct-ContinuedFine | null | null | "2024-11-12T04:56:36Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"sft",
"conversational",
"en",
"base_model:unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
"base_model:finetune:unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 32,768 | null | 0 | 128 | null | null | null |
Onlydrinkwater/llama2-1B-norope-scratch | null | null | "2024-11-12T05:00:21Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 4,096 | llama | 32 | 16 | 8 | 0.00001 | 1,000,000,000,000 | null | false | float32 | 4.46.1 | true | 32,000 | false | 0 | 64 | false | 1 | null |
AI4Chem/ChemVLM-26B-1-2 | null | null | "2024-11-12T05:31:47Z" | null | null | 0 | null | null | null | null | 0 | null | [
"safetensors",
"internvl_chat",
"text-generation",
"conversational",
"custom_code",
"arxiv:2408.07246",
"region:us"
] | text-generation | null | null | 0 | [
"InternVLChatModel"
] | null | null | null | null | null | null | null | internvl_chat | null | null | null | null | null | null | null | bfloat16 | null | null | null | null | null | null | null | null | null |
marcus229/vkods_221 | null | null | "2024-11-12T05:35:56Z" | null | null | 8 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | false | 32,064 | false | 0 | null | null | null | null |
cmeraki/mimi_124m_8cb | null | null | "2024-11-12T05:37:04Z" | null | null | 18 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt2",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.46.0 | true | 70,016 | null | null | null | null | null | null |
marcus229/vkods_222 | null | null | "2024-11-12T05:42:38Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | false | 32,064 | false | 0 | null | null | null | null |
shanginn/Qwen2.5-Coder-32B-Instruct-mlx | null | null | "2024-11-12T05:44:00Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-32B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-32B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
marcus229/vkods_223 | null | null | "2024-11-12T05:44:15Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | false | 32,064 | false | 0 | null | null | null | null |
Rich-J/subnet29_upload_c02_N11_0 | null | null | "2024-11-12T05:53:30Z" | null | null | 2 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"phi3",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Phi3ForCausalLM"
] | 1 | 32,000 | silu | 3,072 | 0.02 | 8,192 | 131,072 | phi3 | 32 | 48 | 32 | 0.00001 | 10,000 | 262,144 | false | bfloat16 | 4.44.0 | true | 32,064 | false | 0 | null | null | null | null |
MrRobotoAI/Unaligned-Base-8b | null | null | "2024-11-12T06:05:10Z" | null | null | 25 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"mergekit",
"merge",
"arxiv:2403.19522",
"base_model:DavidAU/L3.1-Dark-Planet-SpinFire-Uncensored-8B",
"base_model:merge:DavidAU/L3.1-Dark-Planet-SpinFire-Uncensored-8B",
"base_model:ModelsLab/Llama-3.1-8b-Uncensored-Dare",
"base_model:merge:ModelsLab/Llama-3.1-8b-Uncensored-Dare",
"base_model:Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2",
"base_model:merge:Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2",
"base_model:Undi95/Llama-3-LewdPlay-8B",
"base_model:merge:Undi95/Llama-3-LewdPlay-8B",
"base_model:Undi95/Llama-3-LewdPlay-8B-evo",
"base_model:merge:Undi95/Llama-3-LewdPlay-8B-evo",
"base_model:Undi95/Llama-3-Unholy-8B",
"base_model:merge:Undi95/Llama-3-Unholy-8B",
"base_model:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:merge:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:ajibawa-2023/Uncensored-Frank-Llama-3-8B",
"base_model:merge:ajibawa-2023/Uncensored-Frank-Llama-3-8B",
"base_model:hooking-dev/Monah-8b-Uncensored",
"base_model:merge:hooking-dev/Monah-8b-Uncensored",
"base_model:saishf/Aura-Uncensored-OAS-8B-L3",
"base_model:merge:saishf/Aura-Uncensored-OAS-8B-L3",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 1,048,576 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | false | 128,256 | false | 0 | 128 | false | 1 | null |
shanginn/Qwen2.5-Coder-32B-Instruct-mlx-q8 | null | null | "2024-11-12T06:11:53Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"mlx",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-32B-Instruct",
"base_model:quantized:Qwen/Qwen2.5-Coder-32B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
Darshan03/Kannada_FT-v1 | null | null | "2024-11-12T06:14:57Z" | null | null | 6 | null | null | null | null | 0 | transformers | [
"transformers",
"qwen2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | false | 151,936 | null | 0 | null | null | 1 | null |
AIFunOver/OpenCoder-8B-Instruct-openvino-8bit | null | null | "2024-11-12T06:17:31Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openvino",
"llama",
"text-generation",
"nncf",
"8-bit",
"conversational",
"en",
"zh",
"dataset:OpenCoder-LLM/opencoder-sft-stage1",
"dataset:OpenCoder-LLM/opencoder-sft-stage2",
"base_model:infly/OpenCoder-8B-Instruct",
"base_model:quantized:infly/OpenCoder-8B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 96,540 | 96,539 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.2 | true | 96,640 | false | 0 | 128 | false | 1 | null |
MrRobotoAI/Unaligned-Base-8b-v1a | null | null | "2024-11-12T06:33:09Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"mergekit",
"merge",
"arxiv:2403.19522",
"base_model:MrRobotoAI/Unaligned-Base-8b",
"base_model:merge:MrRobotoAI/Unaligned-Base-8b",
"base_model:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:merge:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:surya-narayanan/college_chemistry",
"base_model:merge:surya-narayanan/college_chemistry",
"base_model:surya-narayanan/college_computer_science",
"base_model:merge:surya-narayanan/college_computer_science",
"base_model:surya-narayanan/college_physics",
"base_model:merge:surya-narayanan/college_physics",
"base_model:surya-narayanan/conceptual_physics",
"base_model:merge:surya-narayanan/conceptual_physics",
"base_model:surya-narayanan/electrical_engineering",
"base_model:merge:surya-narayanan/electrical_engineering",
"base_model:surya-narayanan/formal_logic",
"base_model:merge:surya-narayanan/formal_logic",
"base_model:surya-narayanan/philosophy",
"base_model:merge:surya-narayanan/philosophy",
"base_model:surya-narayanan/philosophy_100",
"base_model:merge:surya-narayanan/philosophy_100",
"base_model:surya-narayanan/philosophy_non_masked",
"base_model:merge:surya-narayanan/philosophy_non_masked",
"base_model:surya-narayanan/physics_non_masked",
"base_model:merge:surya-narayanan/physics_non_masked",
"base_model:surya-narayanan/psychology",
"base_model:merge:surya-narayanan/psychology",
"base_model:surya-narayanan/psychology_non_masked",
"base_model:merge:surya-narayanan/psychology_non_masked",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 1,048,576 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | false | 128,256 | false | 0 | 128 | false | 1 | null |
rawsh/mirrorqwen2.5-0.5b-ORPO-3 | null | null | "2024-11-12T06:33:24Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"generated_from_trainer",
"trl",
"orpo",
"unsloth",
"arxiv:2403.07691",
"base_model:rawsh/mirrorqwen2.5-0.5b-ORPO-2",
"base_model:finetune:rawsh/mirrorqwen2.5-0.5b-ORPO-2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 896 | 0.02 | 4,864 | 32,768 | qwen2 | 14 | 24 | 2 | 0.000001 | 1,000,000 | null | true | float16 | 4.46.2 | false | 151,936 | null | 0 | null | null | null | null |
AIFunOver/OpenCoder-8B-Instruct-openvino-fp16 | null | null | "2024-11-12T06:42:39Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openvino",
"llama",
"text-generation",
"nncf",
"fp16",
"conversational",
"en",
"zh",
"dataset:OpenCoder-LLM/opencoder-sft-stage1",
"dataset:OpenCoder-LLM/opencoder-sft-stage2",
"base_model:infly/OpenCoder-8B-Instruct",
"base_model:finetune:infly/OpenCoder-8B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 96,540 | 96,539 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.2 | true | 96,640 | false | 0 | 128 | false | 1 | null |
ADT7/Llama-2-v5 | null | null | "2024-11-12T06:47:20Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.46.2 | true | 32,000 | false | 0 | 128 | false | 1 | null |
MrRobotoAI/Unaligned-Base-8b-v1b | null | null | "2024-11-12T06:49:07Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"mergekit",
"merge",
"arxiv:2403.19522",
"base_model:Azazelle/Llama-3-8B-Abomination-LORA",
"base_model:merge:Azazelle/Llama-3-8B-Abomination-LORA",
"base_model:Azazelle/Llama-3-LimaRP-Instruct-LoRA-8B",
"base_model:merge:Azazelle/Llama-3-LimaRP-Instruct-LoRA-8B",
"base_model:Azazelle/Llama-3-LongStory-LORA",
"base_model:merge:Azazelle/Llama-3-LongStory-LORA",
"base_model:Blackroot/Llama3-RP-Lora",
"base_model:merge:Blackroot/Llama3-RP-Lora",
"base_model:MrRobotoAI/Unaligned-Base-8b-v1a",
"base_model:merge:MrRobotoAI/Unaligned-Base-8b-v1a",
"base_model:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:merge:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:nothingiisreal/llama3-8B-DWP-lora",
"base_model:merge:nothingiisreal/llama3-8B-DWP-lora",
"base_model:surya-narayanan/electrical_engineering",
"base_model:merge:surya-narayanan/electrical_engineering",
"base_model:surya-narayanan/high_school_european_history",
"base_model:merge:surya-narayanan/high_school_european_history",
"base_model:surya-narayanan/human_sexuality",
"base_model:merge:surya-narayanan/human_sexuality",
"base_model:surya-narayanan/sociology",
"base_model:merge:surya-narayanan/sociology",
"base_model:surya-narayanan/world_religions",
"base_model:merge:surya-narayanan/world_religions",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 1,048,576 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | false | 128,256 | false | 0 | 128 | false | 1 | null |
thanhns/mergekit-slerp-jgypmpr | null | null | "2024-11-12T06:52:30Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Equall/Saul-7B-Base",
"base_model:merge:Equall/Saul-7B-Base",
"base_model:HuggingFaceH4/zephyr-7b-beta",
"base_model:merge:HuggingFaceH4/zephyr-7b-beta",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 10,000 | 4,096 | false | bfloat16 | 4.46.2 | true | 32,000 | null | 0 | 128 | null | null | null |
nhyha/N3N_gemma-2-9b-it_20241110_2026 | null | null | "2024-11-12T06:59:35Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"base_model:unsloth/gemma-2-9b-it",
"base_model:finetune:unsloth/gemma-2-9b-it",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.46.1 | true | 256,000 | false | 0 | 256 | null | null | null |
c01zaut/Qwen2.5-Coder-1.5B-Instruct-rk3588-1.1.1 | null | null | "2024-11-12T07:03:36Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-1.5B",
"base_model:finetune:Qwen/Qwen2.5-Coder-1.5B",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.44.0 | true | 151,936 | null | 0 | null | null | null | null |
XelotX/Qwen2.5-Coder-32B-Instruct-Original | null | null | "2024-11-12T07:21:57Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"codeqwen",
"chat",
"qwen",
"qwen-coder",
"conversational",
"en",
"arxiv:2409.12186",
"arxiv:2309.00071",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-Coder-32B",
"base_model:finetune:Qwen/Qwen2.5-Coder-32B",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 5,120 | 0.02 | 27,648 | 32,768 | qwen2 | 40 | 64 | 8 | 0.000001 | 1,000,000 | 131,072 | false | bfloat16 | 4.43.1 | true | 152,064 | null | 0 | null | null | null | null |
0xayman/Qwen-1.5B-fc-v5 | null | null | "2024-11-12T07:22:51Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 1,536 | 0.02 | 8,960 | 32,768 | qwen2 | 12 | 28 | 2 | 0.000001 | 1,000,000 | null | true | float32 | 4.46.2 | true | 151,936 | null | 0 | null | null | null | null |
DiatWork/GPT-Neox-MentalHealth-Finetune | null | null | "2024-11-12T07:31:15Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gpt_neo",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"GPTNeoForCausalLM"
] | 50,256 | 50,256 | null | 2,560 | 0.02 | null | 2,048 | gpt_neo | null | null | null | null | null | null | null | float16 | 4.46.2 | true | 50,257 | null | 0 | null | null | null | null |
dsouzapeter/bloom-560m-8bit | null | null | "2024-11-12T07:40:17Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"bloom",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"8-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"BloomForCausalLM"
] | 1 | 2 | null | 1,024 | 0.02 | null | null | bloom | null | null | null | null | null | null | null | float16 | 4.44.2 | true | 250,880 | null | 0 | null | null | 1 | null |
schuler/experimental-JP47D04 | null | null | "2024-11-12T07:49:51Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"kphi3",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"KPhi3ForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 8,192 | 4,096 | kphi3 | 32 | 2 | 32 | 0.00001 | 10,000 | null | false | float32 | 4.44.2 | true | 32,064 | null | 0 | null | null | null | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_stereo_version_1_2_subset_epoch2 | null | null | "2024-11-12T07:51:42Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
rombodawg/Rombos-Coder-V2.5-Qwen-14b | null | null | "2024-11-12T07:52:21Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-14B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-14B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 13,824 | 32,768 | qwen2 | 40 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
neeleshg23/jamba-from-hf | null | null | "2024-11-12T07:59:59Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"jamba",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"JambaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 2,048 | jamba | 32 | 8 | 8 | 0.000001 | null | null | false | float32 | 4.46.2 | true | 128,000 | null | 0 | null | null | null | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_stereo_version_1_2_3_4_subset_epoch2 | null | null | "2024-11-12T08:03:42Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_stereo_version_1_2_3_4_5_6_subset_epoch2 | null | null | "2024-11-12T08:04:01Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
DopeorNope/GPT4obased-Math7Bs | null | null | "2024-11-12T08:07:10Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float32 | 4.45.2 | false | 32,001 | false | 0 | 128 | false | 1 | null |
rombodawg/Rombos-Coder-V2.5-Qwen-32b | null | null | "2024-11-12T08:07:36Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"code",
"qwen",
"qwen-coder",
"codeqwen",
"conversational",
"en",
"base_model:Qwen/Qwen2.5-Coder-32B-Instruct",
"base_model:finetune:Qwen/Qwen2.5-Coder-32B-Instruct",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 5,120 | 0.02 | 27,648 | 131,072 | qwen2 | 40 | 64 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
DopeorNope/Mumath-10k | null | null | "2024-11-12T08:09:33Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float32 | 4.45.2 | false | 32,001 | false | 0 | 128 | false | 1 | null |
AIFunOver/OpenCoder-8B-Instruct-openvino-4bit | null | null | "2024-11-12T08:11:42Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"openvino",
"llama",
"text-generation",
"nncf",
"4-bit",
"conversational",
"en",
"zh",
"dataset:OpenCoder-LLM/opencoder-sft-stage1",
"dataset:OpenCoder-LLM/opencoder-sft-stage2",
"base_model:infly/OpenCoder-8B-Instruct",
"base_model:quantized:infly/OpenCoder-8B-Instruct",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 96,540 | 96,539 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.45.2 | true | 96,640 | false | 0 | 128 | false | 1 | null |
marcus2000/timelist_dpo_timelist_keymoments_v0_lora | null | null | "2024-11-12T08:12:33Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_stereo_version_1_2_3_4_5_6_7_8_subset_epoch2 | null | null | "2024-11-12T08:17:32Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
allknowingroger/QwenSlerp7-7B | null | null | "2024-11-12T08:24:32Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:merge:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:allknowingroger/Qwen2.5-7B-task4",
"base_model:merge:allknowingroger/Qwen2.5-7B-task4",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_version_1_2_subset_epoch2 | null | null | "2024-11-12T08:29:05Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
AIFunOver/OpenCoder-1.5B-Instruct-openvino-8bit | null | null | "2024-11-12T08:30:20Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"openvino",
"llama",
"text-generation",
"nncf",
"8-bit",
"conversational",
"en",
"zh",
"dataset:OpenCoder-LLM/opencoder-sft-stage1",
"dataset:OpenCoder-LLM/opencoder-sft-stage2",
"base_model:infly/OpenCoder-1.5B-Instruct",
"base_model:quantized:infly/OpenCoder-1.5B-Instruct",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 96,540 | 96,539 | silu | 2,240 | 0.02 | 6,144 | 4,096 | llama | 14 | 24 | 14 | 0.00001 | 10,000 | null | false | bfloat16 | 4.45.2 | true | 96,640 | false | 0 | 160 | false | 1 | null |
allknowingroger/QwenSlerp8-7B | null | null | "2024-11-12T08:31:27Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:merge:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:allknowingroger/Qwen2.5-7B-task8",
"base_model:merge:allknowingroger/Qwen2.5-7B-task8",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
AIFunOver/OpenCoder-1.5B-Instruct-openvino-fp16 | null | null | "2024-11-12T08:37:07Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"openvino",
"llama",
"text-generation",
"nncf",
"fp16",
"conversational",
"en",
"zh",
"dataset:OpenCoder-LLM/opencoder-sft-stage1",
"dataset:OpenCoder-LLM/opencoder-sft-stage2",
"base_model:infly/OpenCoder-1.5B-Instruct",
"base_model:finetune:infly/OpenCoder-1.5B-Instruct",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 96,540 | 96,539 | silu | 2,240 | 0.02 | 6,144 | 4,096 | llama | 14 | 24 | 14 | 0.00001 | 10,000 | null | false | bfloat16 | 4.45.2 | true | 96,640 | false | 0 | 160 | false | 1 | null |
allknowingroger/QwenSlerp9-7B | null | null | "2024-11-12T08:38:25Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:merge:Marsouuu/lareneg3Bv2-ECE-PRYMMAL-Martial",
"base_model:allknowingroger/Qwen2.5-7B-task2",
"base_model:merge:allknowingroger/Qwen2.5-7B-task2",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
rica40325/linear_merge_1113 | null | null | "2024-11-12T08:41:54Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"mergekit",
"merge",
"conversational",
"arxiv:2203.05482",
"base_model:rica40325/10_14dpo",
"base_model:merge:rica40325/10_14dpo",
"base_model:rica40325/nsfw_dpo_1106",
"base_model:merge:rica40325/nsfw_dpo_1106",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 14,336 | 1,024,000 | mistral | 32 | 40 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.46.1 | true | 131,072 | null | 0 | 128 | null | null | null |
marcus2000/timelist_dpo_timelist_keymoments_v1 | null | null | "2024-11-12T08:42:59Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 8,192 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | true | 128,256 | false | 0 | 128 | false | 1 | null |
2z299/aya-expanse-32b-GPTQ-4bit | null | null | "2024-11-12T08:45:08Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"cohere",
"text-generation",
"conversational",
"base_model:CohereForAI/aya-expanse-32b",
"base_model:quantized:CohereForAI/aya-expanse-32b",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 0 | [
"CohereForCausalLM"
] | 5 | 255,001 | silu | 8,192 | 0.02 | 24,576 | 8,192 | cohere | 64 | 40 | 8 | null | 4,000,000 | null | null | float16 | 4.47.0.dev0 | true | 256,000 | false | 0 | null | null | null | null |
2ndBestKiller/CaTinyLlama_M2_CA_TinyLlaama_M2 | null | null | "2024-11-12T08:49:05Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"trl",
"sft",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float32 | 4.45.0.dev0 | true | 32,000 | false | 0 | 64 | false | 1 | null |
Conspirators/asdf | null | null | "2024-11-12T08:49:30Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"pytorch",
"qwen2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"conversational",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 3,584 | 0.02 | 18,944 | 32,768 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | float16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
ManukyanD/gemma-doc-vqa-v6-checkpoint-2 | null | null | "2024-11-12T08:51:32Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | float32 | 4.44.2 | true | 256,000 | false | 0 | 256 | null | null | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_version_1_2_3_4_5_6_subset_epoch2 | null | null | "2024-11-12T08:52:16Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
AIFunOver/OpenCoder-1.5B-Instruct-openvino-4bit | null | null | "2024-11-12T08:56:49Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"openvino",
"llama",
"text-generation",
"nncf",
"4-bit",
"conversational",
"en",
"zh",
"dataset:OpenCoder-LLM/opencoder-sft-stage1",
"dataset:OpenCoder-LLM/opencoder-sft-stage2",
"base_model:infly/OpenCoder-1.5B-Instruct",
"base_model:quantized:infly/OpenCoder-1.5B-Instruct",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 96,540 | 96,539 | silu | 2,240 | 0.02 | 6,144 | 4,096 | llama | 14 | 24 | 14 | 0.00001 | 10,000 | null | false | bfloat16 | 4.45.2 | true | 96,640 | false | 0 | 160 | false | 1 | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_stereo_epoch2 | null | null | "2024-11-12T09:03:54Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
bunnycore/QandoraExp-7B-Persona | null | null | "2024-11-12T09:05:00Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"mergekit",
"merge",
"conversational",
"base_model:bunnycore/QandoraExp-7B",
"base_model:merge:bunnycore/QandoraExp-7B",
"base_model:bunnycore/Qwen-2.1-7b-Persona-lora_model",
"base_model:merge:bunnycore/Qwen-2.1-7b-Persona-lora_model",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,643 | silu | 3,584 | 0.02 | 18,944 | 131,072 | qwen2 | 28 | 28 | 4 | 0.000001 | 1,000,000 | null | false | bfloat16 | 4.46.2 | true | 152,064 | null | 0 | null | null | null | null |
Just-ln-Case/llama_classic_model | null | null | "2024-11-12T09:13:32Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama_classic",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LLaMAClassic"
] | null | null | null | null | null | null | null | llama_classic | null | null | null | null | null | null | null | float32 | 4.44.2 | null | 32,000 | null | null | null | null | null | null |
ryan98153/removeLLM-fine-tuned2 | null | null | "2024-11-12T09:18:23Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 960 | 0.02 | 2,560 | 2,048 | llama | 15 | 32 | 5 | 0.00001 | 10,000 | null | true | float32 | 4.44.2 | true | 49,152 | false | 0 | null | false | 1 | null |
sartifyllc/Pawa-Base-9B-V0.2 | null | null | "2024-11-12T09:24:47Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"gemma2",
"text-generation",
"text-generation-inference",
"unsloth",
"trl",
"en",
"base_model:unsloth/gemma-2-9b",
"base_model:finetune:unsloth/gemma-2-9b",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Gemma2ForCausalLM"
] | 2 | 1 | gelu_pytorch_tanh | 3,584 | 0.02 | 14,336 | 8,192 | gemma2 | 16 | 42 | 8 | 0.000001 | 10,000 | 4,096 | null | bfloat16 | 4.47.0.dev0 | true | 256,000 | false | 0 | 256 | null | null | null |
neeleshg23/jamba-half-from-hf | null | null | "2024-11-12T09:30:48Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"jamba",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"JambaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 2,048 | jamba | 8 | 4 | 8 | 0.000001 | null | null | true | float32 | 4.46.2 | true | 128,000 | null | 0 | null | null | null | null |
Mortie1/new-nlp-hw3-llama2 | null | null | "2024-11-12T09:35:14Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"LLaMa",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MyLLaMa"
] | null | null | null | null | null | null | null | LLaMa | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | null | null | null | null | null | null | null |
Mortie1/new-nlp-hw3-llama3 | null | null | "2024-11-12T09:45:40Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"LLaMa",
"text-generation",
"custom_code",
"arxiv:1910.09700",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MyLLaMa"
] | null | null | null | null | null | null | null | LLaMa | null | null | null | null | null | null | null | float32 | 4.47.0.dev0 | null | null | null | null | null | null | null | null |
neeleshg23/jamba-8-experts | null | null | "2024-11-12T09:46:23Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"jamba",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"JambaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 2,048 | jamba | 64 | 4 | 8 | 0.000001 | null | null | true | float32 | 4.46.2 | true | 128,000 | null | 0 | null | null | null | null |
sania963/sql_v7 | null | null | "2024-11-12T09:47:00Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"mistral",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"MistralForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 32,768 | mistral | 32 | 32 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.47.0.dev0 | true | 32,768 | null | 0 | 128 | null | null | null |
voidful/SmolLM2-360M-Instruct-TTS | null | null | "2024-11-12T09:52:18Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 960 | 0.02 | 2,560 | 8,192 | llama | 15 | 32 | 5 | 0.00001 | 100,000 | null | true | float32 | 4.45.2 | true | 62,110 | false | 0 | 64 | false | 1 | null |
KaKee/llama-2-13b-chat_own_build_dataset_7th_version_1_2_3_4_subset_epoch2 | null | null | "2024-11-12T10:23:31Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
TheAwkwardAlienGuy/llama-2-7b-English-Knowledge | null | null | "2024-11-12T10:28:09Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"trl",
"sft",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"bitsandbytes",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float32 | 4.46.2 | false | 32,000 | false | 0 | 128 | false | 1 | null |
c01zaut/Qwen2.5-3B-Instruct-rk3588-1.1.1 | null | null | "2024-11-12T10:30:40Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"qwen2",
"text-generation",
"chat",
"conversational",
"en",
"arxiv:2407.10671",
"base_model:Qwen/Qwen2.5-3B",
"base_model:finetune:Qwen/Qwen2.5-3B",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"Qwen2ForCausalLM"
] | 151,643 | 151,645 | silu | 2,048 | 0.02 | 11,008 | 32,768 | qwen2 | 16 | 36 | 2 | 0.000001 | 1,000,000 | 32,768 | true | bfloat16 | 4.43.1 | true | 151,936 | null | 0 | null | null | null | null |
slobers/spinkle2 | null | null | "2024-11-12T10:31:11Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
MrRobotoAI/Unaligned-Base-8b-1024K | null | null | "2024-11-12T10:31:16Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"mergekit",
"merge",
"arxiv:2403.19522",
"base_model:Azazelle/L3-Daybreak-8b-lora",
"base_model:merge:Azazelle/L3-Daybreak-8b-lora",
"base_model:Azazelle/Llama-3-LongStory-LORA",
"base_model:merge:Azazelle/Llama-3-LongStory-LORA",
"base_model:Azazelle/Nimue-8B",
"base_model:merge:Azazelle/Nimue-8B",
"base_model:Azazelle/Smarts_Llama3",
"base_model:merge:Azazelle/Smarts_Llama3",
"base_model:Blackroot/Llama-3-LongStory-LORA",
"base_model:merge:Blackroot/Llama-3-LongStory-LORA",
"base_model:MrRobotoAI/Unaligned-Base-8b-v1b",
"base_model:merge:MrRobotoAI/Unaligned-Base-8b-v1b",
"base_model:ResplendentAI/Aura_Llama3",
"base_model:merge:ResplendentAI/Aura_Llama3",
"base_model:ResplendentAI/BlueMoon_Llama3",
"base_model:merge:ResplendentAI/BlueMoon_Llama3",
"base_model:ResplendentAI/Luna_Llama3",
"base_model:merge:ResplendentAI/Luna_Llama3",
"base_model:ResplendentAI/NoWarning_Llama3",
"base_model:merge:ResplendentAI/NoWarning_Llama3",
"base_model:ResplendentAI/Theory_of_Mind_Llama3",
"base_model:merge:ResplendentAI/Theory_of_Mind_Llama3",
"base_model:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:merge:aifeifei798/llama3-8B-DarkIdol-2.2-Uncensored-1048K",
"base_model:nicce/story-mixtral-8x7b-lora",
"base_model:merge:nicce/story-mixtral-8x7b-lora",
"base_model:vincentyandex/lora_llama3_chunked_novel_bs128",
"base_model:merge:vincentyandex/lora_llama3_chunked_novel_bs128",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 128,000 | 128,001 | silu | 4,096 | 0.02 | 14,336 | 1,048,576 | llama | 32 | 32 | 8 | 0.00001 | 500,000 | null | false | float16 | 4.46.2 | false | 128,256 | false | 0 | 128 | false | 1 | null |
neeleshg23/jamba-2.7b | null | null | "2024-11-12T10:36:10Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"jamba",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"JambaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 2,048 | jamba | 16 | 4 | 8 | 0.000001 | null | null | true | float32 | 4.46.2 | true | 128,000 | null | 0 | null | null | null | null |
KaKee/llama-2-7b-chat_own_build_dataset_7th_stereo_epoch2 | null | null | "2024-11-12T10:43:15Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"conversational",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | 10,000 | null | false | float16 | 4.44.2 | true | 32,000 | false | 0 | null | false | 1 | null |
neeleshg23/jamba-1.9b | null | null | "2024-11-12T10:49:45Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"jamba",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"JambaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 14,336 | 2,048 | jamba | 16 | 3 | 8 | 0.000001 | null | null | true | float32 | 4.46.2 | true | 128,000 | null | 0 | null | null | null | null |
parrottygg/LlamaSmallv1 | null | null | "2024-11-12T10:57:43Z" | null | null | 0 | null | null | null | null | 0 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"arxiv:1910.09700",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 0 | [
"LlamaForCausalLM"
] | 100,257 | 100,257 | silu | 4,096 | 0.02 | 14,208 | 4,096 | llama | 32 | 28 | 8 | 0.00001 | 500,000 | null | false | bfloat16 | 4.44.0 | false | 100,288 | false | 0 | 128 | false | 1 | null |
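The columns above combine Hub metadata (id, created_at, downloads, tags, pipeline_tag) with per-model `config.json` fields (hidden_size, num_hidden_layers, torch_dtype, and so on). As a minimal sketch, not part of the original export, the first data row can be transcribed into a typed record; the `ModelRecord` name and the column subset chosen here are illustrative only, but the field names and values are taken directly from the table.

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class ModelRecord:
    """A subset of the table's columns, using the same names as the header."""
    id: str
    created_at: str
    downloads: int
    library_name: Optional[str]
    tags: List[str]
    pipeline_tag: Optional[str]
    architectures: List[str]
    model_type: Optional[str]
    hidden_size: Optional[int]
    num_hidden_layers: Optional[int]
    num_attention_heads: Optional[int]
    torch_dtype: Optional[str]
    vocab_size: Optional[int]


# First data row of the table, transcribed field by field.
example = ModelRecord(
    id="yjwon/mpg27_mistral7bv3_sft_dpo_beta5e-2_epoch2",
    created_at="2024-11-12T04:29:11Z",
    downloads=0,
    library_name="transformers",
    tags=["transformers", "safetensors", "mistral", "text-generation",
          "conversational", "arxiv:1910.09700", "autotrain_compatible",
          "text-generation-inference", "endpoints_compatible", "region:us"],
    pipeline_tag="text-generation",
    architectures=["MistralForCausalLM"],
    model_type="mistral",
    hidden_size=4096,
    num_hidden_layers=32,
    num_attention_heads=32,
    torch_dtype="bfloat16",
    vocab_size=32768,
)

print(example.id, example.model_type, example.hidden_size)
```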