id | author | sha | created_at | last_modified | disabled | downloads | downloads_all_time | gated | gguf | inference | likes | library_name | tags | pipeline_tag | mask_token | model_index | trending_score | architectures | bos_token_id | eos_token_id | hidden_act | hidden_size | initializer_range | intermediate_size | max_position_embeddings | model_type | num_attention_heads | num_hidden_layers | num_key_value_heads | rms_norm_eps | rope_theta | sliding_window | tie_word_embeddings | torch_dtype | transformers_version | use_cache | vocab_size | attention_bias | attention_dropout | head_dim | mlp_bias | pretraining_tp | rope_scaling |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
glaiveai/glaive-function-calling-v1 | null | null | "2023-08-08T10:57:23Z" | null | null | 35 | null | null | null | null | 67 | transformers | [
"transformers",
"pytorch",
"mpt",
"text-generation",
"custom_code",
"dataset:glaiveai/glaive-function-calling",
"license:cc-by-sa-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"MPTForCausalLM"
] | null | 1 | null | null | null | null | null | mpt | null | null | null | null | null | null | null | float32 | 4.28.0 | true | 32,768 | null | null | null | null | null | null |
WizardLMTeam/WizardLM-70B-V1.0 | null | null | "2023-08-09T05:26:23Z" | null | null | 3,901 | null | null | null | null | 235 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2304.12244",
"arxiv:2306.08568",
"arxiv:2308.09583",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.31.0 | false | 32,001 | null | null | null | null | 1 | null |
sinarashidi/llama-2-7b-chat-persian | null | null | "2023-08-09T07:49:50Z" | null | null | 62 | null | null | null | null | 6 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
clibrain/Llama-2-7b-ft-instruct-es | null | null | "2023-08-09T13:24:07Z" | null | null | 1,402 | null | null | null | null | 25 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"es",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
ai-forever/mGPT-1.3B-romanian | null | null | "2023-08-10T05:11:57Z" | null | null | 128 | null | null | null | null | 2 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"gpt3",
"mgpt",
"ro",
"en",
"ru",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.10.3 | true | 100,000 | null | null | null | null | null | null |
MayaPH/GodziLLa2-70B | null | null | "2023-08-10T17:05:37Z" | null | null | 1,076 | null | null | null | null | 38 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"merge",
"mix",
"cot",
"dataset:mlabonne/guanaco-llama2-1k",
"arxiv:1903.00161",
"arxiv:2009.03300",
"arxiv:1803.05457",
"arxiv:1905.07830",
"arxiv:2109.07958",
"arxiv:1907.10641",
"arxiv:2110.14168",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float32 | 4.32.0.dev0 | false | 32,000 | null | null | null | null | 1 | null |
Gryphe/MythoMax-L2-13b | null | null | "2023-08-10T20:35:34Z" | null | null | 20,050 | null | null | null | null | 265 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
WizardLMTeam/WizardMath-70B-V1.0 | null | null | "2023-08-11T04:33:24Z" | null | null | 278 | null | null | null | null | 119 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"arxiv:2304.12244",
"arxiv:2306.08568",
"arxiv:2308.09583",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 2,048 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.31.0 | false | 32,002 | null | null | null | null | 1 | null |
TheBloke/MythoMax-L2-13B-GPTQ | null | null | "2023-08-11T07:27:24Z" | null | null | 6,341 | null | null | null | null | 185 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"en",
"base_model:Gryphe/MythoMax-L2-13b",
"base_model:quantized:Gryphe/MythoMax-L2-13b",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
PharMolix/BioMedGPT-LM-7B | null | null | "2023-08-11T11:28:35Z" | null | null | 4,235 | null | null | null | null | 67 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"medical",
"dataset:biomed",
"arxiv:2308.09442",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float32 | 4.28.1 | false | 32,000 | null | null | null | null | 1 | null |
Universal-NER/UniNER-7B-type-sup | null | null | "2023-08-11T18:12:34Z" | null | null | 2,164 | null | null | null | null | 8 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"arxiv:2308.03279",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float32 | 4.31.0 | false | 32,000 | null | null | null | null | 1 | null |
Universal-NER/UniNER-7B-all | null | null | "2023-08-11T20:52:49Z" | null | null | 7,437 | null | null | null | null | 89 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"arxiv:2308.03279",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float32 | 4.31.0 | false | 32,000 | null | null | null | null | 1 | null |
defog/sqlcoder | null | null | "2023-08-11T21:55:26Z" | null | null | 491 | null | null | null | null | 310 | transformers | [
"transformers",
"pytorch",
"gpt_bigcode",
"text-generation",
"code",
"en",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"GPTBigCodeForCausalLM"
] | 0 | 0 | null | null | 0.02 | null | null | gpt_bigcode | null | null | null | null | null | null | null | bfloat16 | 4.31.0 | true | 49,152 | null | null | null | null | null | null |
Trelis/Llama-2-7b-chat-hf-hosted-inference-8bit | null | null | "2023-08-12T17:08:46Z" | null | null | 156 | null | null | null | null | 7 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"facebook",
"meta",
"llama-2",
"hosted inference",
"8 bit",
"8bit",
"8-bit",
"en",
"arxiv:2307.09288",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
KoboldAI/LLaMA2-13B-Holomax | null | null | "2023-08-14T14:26:32Z" | null | null | 1,794 | null | null | null | null | 21 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.31.0 | false | 32,000 | null | null | null | null | 1 | null |
inkoziev/chargpt-96M | null | null | "2023-08-15T11:18:43Z" | null | null | 63 | null | null | null | null | 2 | transformers | [
"transformers",
"pytorch",
"gpt2",
"text-generation",
"causal-lm",
"ru",
"license:openrail",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 1 | 2 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.28.1 | true | 658 | null | null | null | null | null | null |
ajibawa-2023/scarlett-33b | null | null | "2023-08-15T16:41:08Z" | null | null | 832 | null | null | null | null | 25 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"license:cc-by-nc-nd-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 6,656 | 0.02 | 17,920 | 2,048 | llama | 52 | 60 | 52 | 0.000001 | null | null | false | bfloat16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
tifa-benchmark/llama2_tifa_question_generation | null | null | "2023-08-16T00:41:50Z" | null | null | 2,978 | null | null | null | null | 9 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text-generation-inference",
"llama2",
"text-to-image",
"en",
"dataset:TIFA",
"arxiv:2303.11897",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | bfloat16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
Voicelab/trurl-2-13b | null | null | "2023-08-16T07:36:18Z" | null | null | 1,176 | null | null | null | null | 29 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"voicelab",
"llama-2",
"trurl",
"trurl-2",
"en",
"pl",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | bfloat16 | 4.31.0 | false | 32,001 | null | null | null | null | 1 | null |
Voicelab/trurl-2-7b | null | null | "2023-08-16T09:42:52Z" | null | null | 3,908 | null | null | null | null | 16 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"voicelab",
"llama-2",
"trurl",
"trurl-2",
"en",
"pl",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | bfloat16 | 4.31.0 | false | 32,001 | null | null | null | null | 1 | null |
Qwen/Qwen-VL | null | null | "2023-08-18T02:20:59Z" | null | null | 63,085 | null | null | null | null | 211 | transformers | [
"transformers",
"pytorch",
"qwen",
"text-generation",
"custom_code",
"zh",
"en",
"arxiv:2308.12966",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"QWenLMHeadModel"
] | null | null | null | 4,096 | 0.02 | 22,016 | 8,192 | qwen | 32 | 32 | null | null | null | null | false | bfloat16 | 4.31.0 | true | 151,936 | null | null | null | null | null | null |
casperhansen/falcon-7b-awq | null | null | "2023-08-19T13:17:49Z" | null | null | 107 | null | null | null | null | 1 | transformers | [
"transformers",
"pytorch",
"RefinedWebModel",
"text-generation",
"custom_code",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"RWForCausalLM"
] | 11 | 11 | null | 4,544 | 0.02 | null | null | RefinedWebModel | null | null | null | null | null | null | null | float16 | 4.31.0 | true | 65,024 | null | 0 | null | null | null | null |
tyang816/MedChatZH | null | null | "2023-08-22T02:23:36Z" | null | null | 46 | null | null | null | null | 6 | transformers | [
"transformers",
"pytorch",
"baichuan",
"text-generation",
"custom_code",
"zh",
"dataset:tyang816/MedChatZH",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BaiChuanForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | baichuan | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.30.2 | false | 64,000 | null | null | null | null | null | null |
Trelis/Llama-2-7b-chat-hf-function-calling-v2 | null | null | "2023-08-22T16:28:48Z" | null | null | 650 | null | null | null | null | 130 | transformers | [
"transformers",
"safetensors",
"gguf",
"llama",
"text-generation",
"facebook",
"meta",
"pytorch",
"llama-2",
"functions",
"function calling",
"sharded",
"en",
"arxiv:2307.09288",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | 10,000 | null | false | float16 | 4.35.0.dev0 | true | 32,000 | false | null | null | null | 1 | null |
yongzx/pythia-1b-sft-hh | null | null | "2023-08-23T11:09:39Z" | null | null | 318 | null | null | null | null | 1 | transformers | [
"transformers",
"pytorch",
"gpt_neox",
"text-generation",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPTNeoXForCausalLM"
] | 0 | 0 | gelu | 2,048 | 0.02 | 8,192 | 2,048 | gpt_neox | 8 | 16 | null | null | null | null | false | float16 | 4.31.0 | true | 50,304 | null | 0 | null | null | null | null |
xinlai/LISA-13B-llama2-v1 | null | null | "2023-08-23T11:34:04Z" | null | null | 14,594 | null | null | null | null | 5 | transformers | [
"transformers",
"pytorch",
"llava",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LISAForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llava | 40 | 40 | 40 | 0.00001 | null | null | false | bfloat16 | 4.31.0 | false | 32,003 | null | null | null | null | 1 | null |
openskyml/pigeon-llm | null | null | "2023-08-23T17:08:33Z" | null | null | 34 | null | null | null | null | 3 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"openskyml",
"chat",
"cpu",
"en",
"ru",
"es",
"fr",
"ch",
"zh",
"it",
"pt",
"pl",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.29.0.dev0 | true | 32,000 | null | null | null | null | null | null |
codellama/CodeLlama-7b-hf | null | null | "2023-08-24T16:31:11Z" | null | null | 95,879 | null | null | null | null | 329 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.33.0.dev0 | true | 32,016 | null | null | null | null | 1 | null |
codellama/CodeLlama-7b-Python-hf | null | null | "2023-08-24T16:31:28Z" | null | null | 22,649 | null | null | null | null | 132 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
codellama/CodeLlama-13b-hf | null | null | "2023-08-24T16:31:44Z" | null | null | 6,560 | null | null | null | null | 101 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 16,384 | llama | 40 | 40 | 40 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.32.0.dev0 | true | 32,016 | null | null | null | null | 1 | null |
codellama/CodeLlama-7b-Instruct-hf | null | null | "2023-08-24T16:33:37Z" | null | null | 91,275 | null | null | null | null | 215 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"conversational",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.33.0.dev0 | true | 32,016 | null | null | null | null | 1 | null |
codellama/CodeLlama-13b-Instruct-hf | null | null | "2023-08-24T16:33:54Z" | null | null | 59,402 | null | null | null | null | 143 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"conversational",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 16,384 | llama | 40 | 40 | 40 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.32.0.dev0 | true | 32,016 | null | null | null | null | 1 | null |
codellama/CodeLlama-34b-hf | null | null | "2023-08-24T16:34:39Z" | null | null | 9,028 | null | null | null | null | 167 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | 16,384 | llama | 64 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
codellama/CodeLlama-34b-Instruct-hf | null | null | "2023-08-24T16:58:22Z" | null | null | 13,806 | null | null | null | null | 277 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"llama-2",
"conversational",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | 16,384 | llama | 64 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/CodeLlama-7B-Python-GGUF | null | null | "2023-08-24T17:02:03Z" | null | null | 4,983 | null | null | null | null | 53 | transformers | [
"transformers",
"gguf",
"llama",
"llama-2",
"text-generation",
"code",
"arxiv:2308.12950",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/CodeLlama-7B-GGUF | null | null | "2023-08-24T17:02:09Z" | null | null | 9,509 | null | null | null | null | 108 | transformers | [
"transformers",
"gguf",
"llama",
"llama-2",
"text-generation",
"code",
"arxiv:2308.12950",
"base_model:codellama/CodeLlama-7b-hf",
"base_model:quantized:codellama/CodeLlama-7b-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/CodeLlama-13B-Instruct-GGUF | null | null | "2023-08-24T20:22:41Z" | null | null | 6,627 | null | null | null | null | 116 | transformers | [
"transformers",
"gguf",
"llama",
"llama-2",
"text-generation",
"code",
"arxiv:2308.12950",
"base_model:codellama/CodeLlama-13b-Instruct-hf",
"base_model:quantized:codellama/CodeLlama-13b-Instruct-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/CodeLlama-7B-Instruct-GPTQ | null | null | "2023-08-24T20:27:24Z" | null | null | 34,039 | null | null | null | null | 46 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"llama-2",
"custom_code",
"code",
"arxiv:2308.12950",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | float16 | 4.32.0 | true | 32,016 | null | null | null | null | 1 | null |
DAMO-NLP/SeqGPT-560M | null | null | "2023-08-25T02:57:11Z" | null | null | 74 | null | null | null | null | 15 | transformers | [
"transformers",
"pytorch",
"bloom",
"text-generation",
"en",
"zh",
"arxiv:2308.10529",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"BloomForCausalLM"
] | 1 | 2 | null | 1,024 | 0.02 | null | null | bloom | null | null | null | null | null | null | null | float16 | 4.28.1 | true | 250,880 | null | 0 | null | null | 1 | null |
Phind/Phind-CodeLlama-34B-Python-v1 | null | null | "2023-08-25T20:33:09Z" | null | null | 869 | null | null | null | null | 252 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"code llama",
"license:llama2",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | 16,384 | llama | 64 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
WizardLMTeam/WizardCoder-Python-34B-V1.0 | null | null | "2023-08-26T03:59:07Z" | null | null | 340 | null | null | null | null | 764 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"code",
"arxiv:2304.12244",
"arxiv:2306.08568",
"arxiv:2308.09583",
"license:llama2",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | 16,384 | llama | 64 | 48 | 8 | 0.00001 | 1,000,000 | null | false | float16 | 4.31.0 | false | 32,001 | null | null | null | null | 1 | null |
sakuraumi/Sakura-13B-Galgame | null | null | "2023-08-26T16:28:53Z" | null | null | 94 | null | null | null | null | 111 | transformers | [
"transformers",
"pytorch",
"baichuan",
"text-generation",
"custom_code",
"zh",
"ja",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BaichuanForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,696 | null | baichuan | 40 | 40 | null | 0.000001 | null | null | false | bfloat16 | 4.33.2 | false | 125,696 | null | null | null | null | null | null |
axiong/PMC_LLaMA_13B | null | null | "2023-08-28T05:38:32Z" | null | null | 2,474 | null | null | null | null | 32 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:openrail",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 0 | 1 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float32 | 4.28.1 | true | 32,001 | null | null | null | null | null | null |
elyza/ELYZA-japanese-Llama-2-7b | null | null | "2023-08-28T12:38:34Z" | null | null | 2,840 | null | null | null | null | 94 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"ja",
"en",
"arxiv:2307.09288",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float16 | 4.30.2 | true | 32,000 | null | null | null | null | 1 | null |
elyza/ELYZA-japanese-Llama-2-7b-instruct | null | null | "2023-08-28T12:58:25Z" | null | null | 19,136 | null | null | null | null | 64 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"ja",
"en",
"arxiv:2307.09288",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float16 | 4.30.2 | true | 32,000 | null | null | null | null | 1 | null |
elyza/ELYZA-japanese-Llama-2-7b-fast | null | null | "2023-08-28T13:17:58Z" | null | null | 1,864 | null | null | null | null | 25 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"ja",
"en",
"arxiv:2307.09288",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | null | null | false | float16 | 4.30.2 | true | 45,043 | null | null | null | null | 1 | null |
Phind/Phind-CodeLlama-34B-v2 | null | null | "2023-08-28T21:29:09Z" | null | null | 3,677 | null | null | null | null | 829 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"code llama",
"license:llama2",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | 16,384 | llama | 64 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
baichuan-inc/Baichuan2-7B-Chat | null | null | "2023-08-29T02:21:41Z" | null | null | 21,368 | null | null | null | null | 158 | transformers | [
"transformers",
"pytorch",
"baichuan",
"text-generation",
"custom_code",
"en",
"zh",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BaichuanForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | baichuan | 32 | 32 | null | 0.000001 | null | null | false | bfloat16 | 4.29.2 | true | 125,696 | null | null | null | null | null | null |
baichuan-inc/Baichuan2-13B-Chat | null | null | "2023-08-29T02:30:01Z" | null | null | 103,257 | null | null | null | null | 421 | transformers | [
"transformers",
"pytorch",
"baichuan",
"text-generation",
"custom_code",
"en",
"zh",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BaichuanForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,696 | null | baichuan | 40 | 40 | null | 0.000001 | null | null | false | bfloat16 | 4.29.2 | true | 125,696 | null | null | null | null | null | null |
baichuan-inc/Baichuan2-7B-Base | null | null | "2023-08-30T10:11:04Z" | null | null | 1,900 | null | null | null | null | 77 | transformers | [
"transformers",
"pytorch",
"baichuan",
"text-generation",
"custom_code",
"en",
"zh",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BaichuanForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | baichuan | 32 | 32 | null | 0.000001 | null | null | false | bfloat16 | 4.29.2 | true | 125,696 | null | null | null | null | null | null |
SDUIRLab/fuzi-mingcha-v1_0 | null | null | "2023-08-31T03:30:17Z" | null | null | 102 | null | null | null | null | 9 | transformers | [
"transformers",
"pytorch",
"chatglm",
"feature-extraction",
"legal",
"text-generation",
"custom_code",
"license:apache-2.0",
"region:us"
] | text-generation | null | null | 1 | [
"ChatGLMForConditionalGeneration"
] | 130,004 | 130,005 | null | 4,096 | null | null | null | chatglm | 32 | null | null | null | null | null | null | float16 | 4.30.0.dev0 | false | 130,528 | null | null | null | null | null | null |
squarelike/Gugugo-koja-1.3B-V0.95 | null | null | "2023-08-31T14:17:12Z" | null | null | 82 | null | null | null | null | 2 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt_neox",
"text-generation",
"translation",
"ja",
"ko",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | translation | null | null | 1 | [
"GPTNeoXForCausalLM"
] | 0 | 2 | gelu | 2,048 | 0.02 | 8,192 | 2,048 | gpt_neox | 16 | 24 | null | null | null | null | false | float16 | 4.32.0.dev0 | true | 30,080 | null | 0 | null | null | null | null |
TinyLlama/TinyLlama-1.1B-step-50K-105b | null | null | "2023-09-01T08:59:02Z" | null | null | 11,238 | null | null | null | null | 132 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"en",
"dataset:cerebras/SlimPajama-627B",
"dataset:bigcode/starcoderdata",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | null | null | false | float32 | 4.31.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
Xenova/WizardCoder-1B-V1.0 | null | null | "2023-09-01T19:43:54Z" | null | null | 87 | null | null | null | null | 4 | transformers.js | [
"transformers.js",
"onnx",
"gpt_bigcode",
"text-generation",
"base_model:WizardLM/WizardCoder-1B-V1.0",
"base_model:quantized:WizardLM/WizardCoder-1B-V1.0",
"region:us"
] | text-generation | null | null | 1 | [
"GPTBigCodeForCausalLM"
] | 0 | 0 | null | null | 0.02 | null | null | gpt_bigcode | null | null | null | null | null | null | null | null | 4.33.0.dev0 | false | 49,153 | null | null | null | null | null | null |
TheBloke/OpenBuddy-Llama2-13B-v11.1-GGUF | null | null | "2023-09-02T09:49:00Z" | null | null | 907 | null | null | null | null | 28 | transformers | [
"transformers",
"gguf",
"llama",
"text-generation",
"zh",
"en",
"fr",
"de",
"ja",
"ko",
"it",
"ru",
"base_model:OpenBuddy/openbuddy-llama2-13b-v11.1-bf16",
"base_model:quantized:OpenBuddy/openbuddy-llama2-13b-v11.1-bf16",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
elinas/chronos-70b-v2 | null | null | "2023-09-03T05:08:04Z" | null | null | 838 | null | null | null | null | 16 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"chat",
"roleplay",
"storywriting",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
42dot/42dot_LLM-PLM-1.3B | null | null | "2023-09-04T05:54:07Z" | null | null | 2,956 | null | null | null | null | 25 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"causal-lm",
"42dot_llm",
"en",
"ko",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 50,257 | 50,256 | silu | 2,048 | 0.01 | 5,632 | 4,096 | llama | 32 | 24 | 32 | 0.000001 | null | null | false | float32 | 4.31.0 | true | 50,304 | null | null | null | null | 1 | null |
TheBloke/Llama-2-7B-GGUF | null | null | "2023-09-04T15:53:57Z" | null | null | 19,218 | null | null | null | null | 178 | transformers | [
"transformers",
"gguf",
"llama",
"facebook",
"meta",
"pytorch",
"llama-2",
"text-generation",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-7b-hf",
"base_model:quantized:meta-llama/Llama-2-7b-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/Llama-2-13B-chat-GGUF | null | null | "2023-09-04T17:20:15Z" | null | null | 13,785 | null | null | null | null | 191 | transformers | [
"transformers",
"gguf",
"llama",
"facebook",
"meta",
"pytorch",
"llama-2",
"text-generation",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-13b-chat-hf",
"base_model:quantized:meta-llama/Llama-2-13b-chat-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/Llama-2-70B-Chat-GGUF | null | null | "2023-09-04T17:53:09Z" | null | null | 9,416 | null | null | null | null | 122 | transformers | [
"transformers",
"gguf",
"llama",
"facebook",
"meta",
"pytorch",
"llama-2",
"text-generation",
"en",
"arxiv:2307.09288",
"base_model:meta-llama/Llama-2-70b-chat-hf",
"base_model:quantized:meta-llama/Llama-2-70b-chat-hf",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
PygmalionAI/pygmalion-2-13b | null | null | "2023-09-04T22:05:31Z" | null | null | 1,949 | null | null | null | null | 76 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"text generation",
"instruct",
"en",
"dataset:PygmalionAI/PIPPA",
"dataset:Open-Orca/OpenOrca",
"dataset:Norquinal/claude_multiround_chat_30k",
"dataset:jondurbin/airoboros-gpt4-1.4.1",
"dataset:databricks/databricks-dolly-15k",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | bfloat16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
PygmalionAI/mythalion-13b | null | null | "2023-09-05T12:45:18Z" | null | null | 2,122 | null | null | null | null | 144 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"text generation",
"instruct",
"en",
"dataset:PygmalionAI/PIPPA",
"dataset:Open-Orca/OpenOrca",
"dataset:Norquinal/claude_multiround_chat_30k",
"dataset:jondurbin/airoboros-gpt4-1.4.1",
"dataset:databricks/databricks-dolly-15k",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
Undi95/CreativityEngine | null | null | "2023-09-05T14:17:21Z" | null | null | 785 | null | null | null | null | 1 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.33.0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/Pygmalion-2-7B-GGUF | null | null | "2023-09-05T22:02:07Z" | null | null | 4,506 | null | null | null | null | 26 | transformers | [
"transformers",
"gguf",
"llama",
"text generation",
"instruct",
"text-generation",
"en",
"dataset:PygmalionAI/PIPPA",
"dataset:Open-Orca/OpenOrca",
"dataset:Norquinal/claude_multiround_chat_30k",
"dataset:jondurbin/airoboros-gpt4-1.4.1",
"dataset:databricks/databricks-dolly-15k",
"base_model:PygmalionAI/pygmalion-2-7b",
"base_model:quantized:PygmalionAI/pygmalion-2-7b",
"license:llama2",
"region:us"
] | text-generation | null | null | 1 | null | null | null | null | null | null | null | null | llama | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null |
TheBloke/Mythalion-13B-GPTQ | null | null | "2023-09-05T22:02:52Z" | null | null | 322 | null | null | null | null | 53 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text generation",
"instruct",
"en",
"dataset:PygmalionAI/PIPPA",
"dataset:Open-Orca/OpenOrca",
"dataset:Norquinal/claude_multiround_chat_30k",
"dataset:jondurbin/airoboros-gpt4-1.4.1",
"dataset:databricks/databricks-dolly-15k",
"base_model:PygmalionAI/mythalion-13b",
"base_model:quantized:PygmalionAI/mythalion-13b",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
Undi95/MLewd-L2-13B-v2 | null | null | "2023-09-05T23:26:42Z" | null | null | 22 | null | null | null | null | 3 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.33.0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/Chronos-70B-v2-GPTQ | null | null | "2023-09-06T00:26:18Z" | null | null | 38 | null | null | null | null | 11 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"chat",
"roleplay",
"storywriting",
"base_model:elinas/chronos-70b-v2",
"base_model:quantized:elinas/chronos-70b-v2",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
alibaba-pai/pai-bloom-1b1-text2prompt-sd-v2 | null | null | "2023-09-06T06:54:26Z" | null | null | 107 | null | null | null | null | 19 | transformers | [
"transformers",
"pytorch",
"safetensors",
"bloom",
"text-generation",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BloomForCausalLM"
] | 1 | 2 | null | 1,536 | 0.02 | null | null | bloom | null | null | null | null | null | null | null | float16 | 4.30.0 | true | 250,880 | null | 0 | null | null | 1 | null |
Undi95/MLewd-L2-13B-v2-1 | null | null | "2023-09-06T16:16:50Z" | null | null | 30 | null | null | null | null | 7 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.33.0 | true | 32,000 | null | null | null | null | 1 | null |
bugdaryan/Code-Llama-2-13B-instruct-text2sql | null | null | "2023-09-06T16:41:05Z" | null | null | 293 | null | null | null | null | 27 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"en",
"dataset:bugdaryan/sql-create-context-instruction",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 16,384 | llama | 40 | 40 | 40 | 0.00001 | 1,000,000 | null | false | float16 | 4.34.0.dev0 | true | 32,016 | null | null | null | null | 1 | null |
TheBloke/Falcon-180B-Chat-GPTQ | null | null | "2023-09-06T16:47:56Z" | null | null | 2,463 | null | null | null | null | 69 | transformers | [
"transformers",
"safetensors",
"falcon",
"text-generation",
"en",
"de",
"es",
"fr",
"dataset:tiiuae/falcon-refinedweb",
"arxiv:1911.02150",
"arxiv:2005.14165",
"arxiv:2104.09864",
"arxiv:2205.14135",
"arxiv:2306.01116",
"base_model:tiiuae/falcon-180B-chat",
"base_model:quantized:tiiuae/falcon-180B-chat",
"license:unknown",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"FalconForCausalLM"
] | 11 | 11 | null | 14,848 | 0.02 | null | 2,048 | falcon | 232 | 80 | null | null | 10,000 | null | null | float16 | 4.33.0 | true | 65,024 | null | 0 | null | null | null | null |
Undi95/MLewd-L2-13B-v2-2 | null | null | "2023-09-08T00:38:39Z" | null | null | 46 | null | null | null | null | 4 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | float16 | 4.33.1 | true | 32,000 | null | null | null | null | 1 | null |
Arjun-G-Ravi/chat-GPT2 | null | null | "2023-09-08T03:00:47Z" | null | null | 663 | null | null | null | null | 4 | transformers | [
"transformers",
"pytorch",
"safetensors",
"gpt2",
"text-generation",
"chemistry",
"biology",
"text-generation-inference",
"en",
"dataset:MuskumPillerum/General-Knowledge",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"GPT2LMHeadModel"
] | 50,256 | 50,256 | null | null | 0.02 | null | null | gpt2 | null | null | null | null | null | null | null | float32 | 4.32.0.dev0 | true | 50,259 | null | null | null | null | null | null |
Faradaylab/ARIA-70B-V2 | null | null | "2023-09-08T17:30:29Z" | null | null | 920 | null | null | null | null | 12 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"code",
"text-generation-inference",
"Meta ",
"facebook",
"openassistant",
"data",
"education",
"languages",
"legal",
"fr",
"en",
"arxiv:2307.09288",
"license:llama2",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float16 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
microsoft/phi-1_5 | null | null | "2023-09-10T04:03:46Z" | null | null | 129,516 | null | null | null | null | 1,314 | transformers | [
"transformers",
"safetensors",
"phi",
"text-generation",
"nlp",
"code",
"en",
"arxiv:2309.05463",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"PhiForCausalLM"
] | null | null | gelu_new | 2,048 | 0.02 | 8,192 | 2,048 | phi | 32 | 24 | null | null | 10,000 | null | false | float16 | 4.37.0 | true | 51,200 | null | 0 | null | null | null | null |
teknium/Puffin-Phi-v2 | null | null | "2023-09-12T22:00:45Z" | null | null | 24 | null | null | null | null | 40 | transformers | [
"transformers",
"pytorch",
"mixformer-sequential",
"text-generation",
"custom_code",
"en",
"dataset:LDJnr/Puffin",
"license:other",
"autotrain_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"MixFormerSequentialForCausalLM"
] | null | null | null | null | 0.02 | null | null | mixformer-sequential | null | null | null | null | null | null | false | bfloat16 | 4.34.0.dev0 | false | 50,304 | null | null | null | null | null | null |
ByteWave/prompt-generator | null | null | "2023-09-14T08:59:05Z" | null | null | 370 | null | null | null | null | 16 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"ChatGPT",
"Llama",
"Agents",
"LLMs",
"en",
"dataset:fka/awesome-chatgpt-prompts",
"dataset:PulsarAI/awesome-chatgpt-prompts-advanced",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 3,200 | 0.02 | 8,640 | 2,048 | llama | 32 | 26 | 32 | 0.000001 | 10,000 | null | false | float16 | 4.33.1 | true | 32,000 | null | null | null | null | 1 | null |
Suprit/Zhongjing-LLaMA-base | null | null | "2023-09-14T17:02:02Z" | null | null | 1,032 | null | null | null | null | 8 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"zh",
"arxiv:2308.03549",
"license:mit",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float16 | 4.29.1 | true | 39,424 | null | null | null | null | null | null |
ajibawa-2023/Uncensored-Frank-13B | null | null | "2023-09-14T18:20:25Z" | null | null | 821 | null | null | null | null | 8 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"en",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"license:cc-by-nc-nd-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | bfloat16 | 4.28.1 | false | 32,000 | null | null | null | null | 1 | null |
Xwin-LM/Xwin-LM-70B-V0.1 | null | null | "2023-09-15T14:04:14Z" | null | null | 1,363 | null | null | null | null | 213 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 4,096 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float32 | 4.28.1 | false | 32,000 | null | null | null | null | 1 | null |
baraqw/ajax12_finetuned | null | null | "2023-09-16T06:22:42Z" | null | null | 7 | null | null | null | null | 1 | transformers | [
"transformers",
"pytorch",
"biogpt",
"text-generation",
"generated_from_trainer",
"base_model:microsoft/biogpt",
"base_model:finetune:microsoft/biogpt",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"BioGptForCausalLM"
] | 0 | 2 | gelu | 1,024 | 0.02 | 4,096 | 1,024 | biogpt | 16 | 24 | null | null | null | null | null | float32 | 4.33.2 | true | 42,384 | null | null | null | null | null | null |
TinyLlama/TinyLlama-1.1B-Chat-v0.1 | null | null | "2023-09-16T14:15:48Z" | null | null | 3,161 | null | null | null | null | 52 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"en",
"dataset:cerebras/SlimPajama-627B",
"dataset:bigcode/starcoderdata",
"dataset:timdettmers/openassistant-guanaco",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 2,048 | 0.02 | 5,632 | 2,048 | llama | 32 | 22 | 4 | 0.00001 | 10,000 | null | false | float32 | 4.33.1 | false | 32,001 | null | null | null | null | 1 | null |
glaiveai/glaive-coder-7b | null | null | "2023-09-17T14:49:44Z" | null | null | 1,326 | null | null | null | null | 54 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"code",
"en",
"dataset:glaiveai/glaive-code-assistant",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 16,384 | llama | 32 | 32 | 32 | 0.00001 | 1,000,000 | null | false | float32 | 4.32.0 | true | 32,017 | null | null | null | null | 1 | null |
AdaptLLM/medicine-LLM | null | null | "2023-09-18T07:59:28Z" | null | null | 125 | null | null | null | null | 35 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"biology",
"medical",
"en",
"dataset:Open-Orca/OpenOrca",
"dataset:GAIR/lima",
"dataset:WizardLM/WizardLM_evol_instruct_V2_196k",
"dataset:EleutherAI/pile",
"arxiv:2309.09530",
"arxiv:2406.14491",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LLaMAForCausalLM"
] | 0 | 1 | silu | 4,096 | 0.02 | 11,008 | null | llama | 32 | 32 | null | 0.000001 | null | null | null | float16 | 4.27.0.dev0 | true | 32,001 | null | null | null | null | null | null |
AdaptLLM/law-LLM | null | null | "2023-09-18T13:44:51Z" | null | null | 274 | null | null | null | null | 55 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"legal",
"en",
"dataset:Open-Orca/OpenOrca",
"dataset:GAIR/lima",
"dataset:WizardLM/WizardLM_evol_instruct_V2_196k",
"dataset:EleutherAI/pile",
"arxiv:2309.09530",
"arxiv:2406.14491",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LLaMAForCausalLM"
] | 0 | 1 | silu | 4,096 | 0.02 | 11,008 | null | llama | 32 | 32 | null | 0.000001 | null | null | null | float16 | 4.27.0.dev0 | true | 32,001 | null | null | null | null | null | null |
AdaptLLM/finance-LLM | null | null | "2023-09-18T13:45:13Z" | null | null | 747 | null | null | null | null | 102 | transformers | [
"transformers",
"pytorch",
"safetensors",
"llama",
"text-generation",
"finance",
"en",
"dataset:Open-Orca/OpenOrca",
"dataset:GAIR/lima",
"dataset:WizardLM/WizardLM_evol_instruct_V2_196k",
"arxiv:2309.09530",
"arxiv:2406.14491",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LLaMAForCausalLM"
] | 0 | 1 | silu | 4,096 | 0.02 | 11,008 | null | llama | 32 | 32 | null | 0.000001 | null | null | null | float16 | 4.27.0.dev0 | true | 32,001 | null | null | null | null | null | null |
OpenBuddy/openbuddy-openllama-7b-v12-bf16 | null | null | "2023-09-19T04:26:50Z" | null | null | 2,574 | null | null | null | null | 3 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"zh",
"en",
"fr",
"de",
"ja",
"ko",
"it",
"ru",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.000001 | 10,000 | null | false | bfloat16 | 4.33.0.dev0 | true | 37,120 | null | null | null | null | 1 | null |
starmpcc/Asclepius-Llama2-7B | null | null | "2023-09-19T05:41:41Z" | null | null | 1,025 | null | null | null | null | 13 | transformers | [
"transformers",
"pytorch",
"llama",
"text-generation",
"medical",
"text2text-generation",
"en",
"dataset:starmpcc/Asclepius-Synthetic-Clinical-Notes",
"arxiv:2309.00237",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text2text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | float32 | 4.28.0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/llama2_70b_chat_uncensored-AWQ | null | null | "2023-09-19T06:51:16Z" | null | null | 48 | null | null | null | null | 6 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"uncensored",
"wizard",
"vicuna",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"arxiv:2305.14314",
"base_model:jarradh/llama2_70b_chat_uncensored",
"base_model:quantized:jarradh/llama2_70b_chat_uncensored",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 28,672 | 2,048 | llama | 64 | 80 | 8 | 0.00001 | null | null | false | float32 | 4.31.0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/Pygmalion-2-7B-AWQ | null | null | "2023-09-19T07:38:42Z" | null | null | 105 | null | null | null | null | 5 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"text generation",
"instruct",
"en",
"dataset:PygmalionAI/PIPPA",
"dataset:Open-Orca/OpenOrca",
"dataset:Norquinal/claude_multiround_chat_30k",
"dataset:jondurbin/airoboros-gpt4-1.4.1",
"dataset:databricks/databricks-dolly-15k",
"base_model:PygmalionAI/pygmalion-2-7b",
"base_model:quantized:PygmalionAI/pygmalion-2-7b",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | bfloat16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/Phind-CodeLlama-34B-v2-AWQ | null | null | "2023-09-19T10:08:38Z" | null | null | 460 | null | null | null | null | 32 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"code llama",
"base_model:Phind/Phind-CodeLlama-34B-v2",
"base_model:quantized:Phind/Phind-CodeLlama-34B-v2",
"license:llama2",
"model-index",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | 16,384 | llama | 64 | 48 | 8 | 0.00001 | 1,000,000 | null | false | bfloat16 | 4.33.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/Wizard-Vicuna-13B-Uncensored-AWQ | null | null | "2023-09-19T22:57:57Z" | null | null | 86 | null | null | null | null | 13 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"uncensored",
"en",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"base_model:cognitivecomputations/Wizard-Vicuna-13B-Uncensored",
"base_model:quantized:cognitivecomputations/Wizard-Vicuna-13B-Uncensored",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 2,048 | llama | 40 | 40 | null | 0.000001 | null | null | false | float32 | 4.28.1 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/Wizard-Vicuna-30B-Uncensored-AWQ | null | null | "2023-09-19T23:02:49Z" | null | null | 106 | null | null | null | null | 13 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"uncensored",
"en",
"dataset:ehartford/wizard_vicuna_70k_unfiltered",
"base_model:cognitivecomputations/Wizard-Vicuna-30B-Uncensored",
"base_model:quantized:cognitivecomputations/Wizard-Vicuna-30B-Uncensored",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 6,656 | 0.02 | 17,920 | 2,048 | llama | 52 | 60 | null | 0.000001 | null | null | false | float16 | 4.30.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
TheBloke/WizardLM-7B-uncensored-AWQ | null | null | "2023-09-19T23:17:38Z" | null | null | 44 | null | null | null | null | 4 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"uncensored",
"dataset:ehartford/WizardLM_alpaca_evol_instruct_70k_unfiltered",
"base_model:cognitivecomputations/WizardLM-7B-Uncensored",
"base_model:quantized:cognitivecomputations/WizardLM-7B-Uncensored",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 2,048 | llama | 32 | 32 | null | 0.000001 | null | null | false | float16 | 4.29.0.dev0 | true | 32,001 | null | null | null | null | 1 | null |
TheBloke/LLaMA-65B-AWQ | null | null | "2023-09-20T02:33:20Z" | null | null | 10 | null | null | null | null | 2 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"license:other",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"awq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 8,192 | 0.02 | 22,016 | null | llama | 64 | 80 | null | 0.00001 | null | null | false | float16 | 4.28.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
R136a1/MythoMax-L2-13B-exl2 | null | null | "2023-09-21T16:27:43Z" | null | null | 52 | null | null | null | null | 6 | transformers | [
"transformers",
"llama",
"text-generation",
"en",
"license:other",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | null | null | false | float16 | 4.32.0.dev0 | true | 32,000 | null | null | null | null | 1 | null |
MathLLMs/MathCoder-L-7B | null | null | "2023-09-22T11:28:29Z" | null | null | 122 | null | null | null | null | 14 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"en",
"arxiv:2310.03731",
"license:apache-2.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 4,096 | 0.02 | 11,008 | 4,096 | llama | 32 | 32 | 32 | 0.00001 | null | null | false | bfloat16 | 4.31.0 | true | 32,008 | null | null | null | null | 1 | null |
Undi95/MXLewd-L2-20B | null | null | "2023-09-22T16:04:41Z" | null | null | 839 | null | null | null | null | 27 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"license:cc-by-nc-4.0",
"autotrain_compatible",
"text-generation-inference",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 62 | 40 | 0.00001 | 10,000 | null | false | bfloat16 | 4.33.2 | true | 32,000 | null | null | null | null | 1 | null |
SarthakBhatore/codegen-350M-mono-18k-alpaca-python | null | null | "2023-09-23T15:01:05Z" | null | null | 16 | null | null | null | null | 2 | transformers | [
"transformers",
"pytorch",
"codegen",
"text-generation",
"dataset:iamtarun/python_code_instructions_18k_alpaca",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] | text-generation | null | null | 1 | [
"CodeGenForCausalLM"
] | 1 | 50,256 | null | null | 0.02 | null | null | codegen | null | null | null | null | null | null | false | float16 | 4.33.2 | true | 51,200 | null | null | null | null | null | null |
TheBloke/storytime-13B-GPTQ | null | null | "2023-09-23T23:27:40Z" | null | null | 309 | null | null | null | null | 30 | transformers | [
"transformers",
"safetensors",
"llama",
"text-generation",
"en",
"base_model:chargoddard/storytime-13b",
"base_model:quantized:chargoddard/storytime-13b",
"license:llama2",
"autotrain_compatible",
"text-generation-inference",
"4-bit",
"gptq",
"region:us"
] | text-generation | null | null | 1 | [
"LlamaForCausalLM"
] | 1 | 2 | silu | 5,120 | 0.02 | 13,824 | 4,096 | llama | 40 | 40 | 40 | 0.00001 | 10,000 | null | false | bfloat16 | 4.33.2 | true | 32,000 | null | null | null | null | 1 | null |
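Every row above follows the same flat schema listed in the table header (`id`, `tags`, `architectures`, `model_type`, `hidden_size`, `num_hidden_layers`, `rope_theta`, `downloads`, and so on). As a minimal sketch of how rows in that shape could be queried, assuming they have been exported to a hypothetical local `models.jsonl` file with one JSON object per row keyed by those column names:

```python
import json

# Load the exported rows; "models.jsonl" is a hypothetical local file with
# one JSON object per table row, keyed by the header column names.
with open("models.jsonl", encoding="utf-8") as f:
    rows = [json.loads(line) for line in f]

# Example query: llama-type configs that use the long-context rope_theta of
# 1,000,000 (e.g. the Code Llama family above), sorted by download count.
long_context_llamas = [
    r for r in rows
    if r.get("model_type") == "llama" and r.get("rope_theta") == 1_000_000
]
long_context_llamas.sort(key=lambda r: r.get("downloads") or 0, reverse=True)

for r in long_context_llamas[:5]:
    print(r["id"], r.get("hidden_size"), r.get("num_hidden_layers"), r.get("downloads"))
```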