Dataset schema (column, dtype, observed values/range):

| column | dtype | values / range |
| --- | --- | --- |
| _id | stringlengths | 24–24 |
| id | stringlengths | 14–79 |
| author | stringlengths | 3–21 |
| gated | stringclasses | 3 values |
| inference | stringclasses | 9 values |
| lastModified | stringlengths | 24–24 |
| likes | int64 | 0–4.13k |
| private | bool | 1 class |
| sha | stringlengths | 40–40 |
| config | stringlengths | 23–5.92k |
| downloads | int64 | 1.8k–3.13M |
| tags | sequencelengths | 5–62 |
| pipeline_tag | stringclasses | 6 values |
| library_name | stringclasses | 4 values |
| createdAt | stringlengths | 24–24 |
| modelId | stringlengths | 14–79 |
| siblings | listlengths | 4–285 |
| model_type | stringclasses | 13 values |
| base_model | stringlengths | 13–43 |
| base_model_downloads | float64 | 98–28.9k |
| base_model_pipeline | stringclasses | 1 value |
| base_model_children_count | float64 | 98–28.9k |
| adapter | float64 | 0–377 |
| merge | float64 | 0–94 |
| finetune | float64 | 0–263 |
| quantized | float64 | 0–182 |
| __index_level_0__ | int64 | 86k–992k |
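
The feature summary above is the dataset-viewer schema for the rows that follow. As a minimal sketch of working with it, assuming the rows are published as a `datasets`-compatible dataset (the path `user/model-metadata` is a hypothetical placeholder, not a real repo):

```python
# Minimal sketch, assuming the rows below are available as a HF dataset;
# "user/model-metadata" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("user/model-metadata", split="train")
print(ds.features)  # mirrors the column/dtype summary above

# Example: the five most-downloaded rows and their base models.
top = sorted(ds, key=lambda r: r["downloads"], reverse=True)[:5]
for row in top:
    print(row["id"], row["downloads"], row["base_model"])
```
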
_id: 6698d8a0653e4babe21e1e7d
id: meta-llama/Llama-3.1-8B-Instruct
author: meta-llama
gated: manual
inference: loading
lastModified: 2024-09-25T17:00:57.000Z
likes: 2,649
private: false
sha: 0e9e39f249a16976918f6564b8830bc894c89659
config:
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", "eos_token": "<|eot_id|>"}}
downloads: 3,134,749
tags:
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-3", "conversational", "en", "de", "fr", "it", "pt", "hi", "es", "th", "arxiv:2204.05149", "base_model:meta-llama/Llama-3.1-8B", "base_model:finetune:meta-llama/Llama-3.1-8B", "license:llama3.1", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-07-18T08:56:00.000Z
modelId: meta-llama/Llama-3.1-8B-Instruct
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "original/consolidated.00.pth" }, { "rfilename": "original/params.json" }, { "rfilename": "original/tokenizer.model" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
model_type: llama
base_model: meta-llama/Llama-3.1-8B
base_model_downloads: 254
base_model_pipeline: text-generation
base_model_children_count: 254
adapter: 377
merge: 22
finetune: 241
quantized: 182
__index_level_0__: 760,607
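
Each row's `config` embeds the repo's `tokenizer_config`, including the Jinja `chat_template` shown above. A sketch of how such a template is applied in practice (assumes `transformers` is installed and, for this particular repo, access to the gated meta-llama weights; any chat-model repo works the same way):

```python
# Sketch: rendering a conversation with the chat_template stored in
# tokenizer_config.json; assumes access to the gated meta-llama repo.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("meta-llama/Llama-3.1-8B-Instruct")
messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Summarize GPTQ in one sentence."},
]
# add_generation_prompt=True appends the assistant header, exactly as the
# template's add_generation_prompt branch above specifies.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```
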
_id: 667928d58601cd537f63c846
id: google/gemma-2-9b-it
author: google
gated: manual
inference: warm
lastModified: 2024-08-27T19:41:49.000Z
likes: 468
private: false
sha: 11c9b309abf73637e4b6f9a3fa1e92e615547819
config:
{"architectures": ["Gemma2ForCausalLM"], "model_type": "gemma2", "tokenizer_config": {"bos_token": "<bos>", "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + message['content'] | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}", "eos_token": "<eos>", "pad_token": "<pad>", "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 964,871
tags:
[ "transformers", "safetensors", "gemma2", "text-generation", "conversational", "arxiv:2009.03300", "arxiv:1905.07830", "arxiv:1911.11641", "arxiv:1904.09728", "arxiv:1905.10044", "arxiv:1907.10641", "arxiv:1811.00937", "arxiv:1809.02789", "arxiv:1911.01547", "arxiv:1705.03551", "arxiv:2107.03374", "arxiv:2108.07732", "arxiv:2110.14168", "arxiv:2009.11462", "arxiv:2101.11718", "arxiv:2110.08193", "arxiv:1804.09301", "arxiv:2109.07958", "arxiv:1804.06876", "arxiv:2103.03874", "arxiv:2304.06364", "arxiv:2206.04615", "arxiv:2203.09509", "base_model:google/gemma-2-9b", "base_model:finetune:google/gemma-2-9b", "license:gemma", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-06-24T08:05:41.000Z
modelId: google/gemma-2-9b-it
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "transformers/transformers-4.42.0.dev0-py3-none-any.whl" } ]
model_type: gemma2
base_model: google/gemma-2-9b
base_model_downloads: 105
base_model_pipeline: text-generation
base_model_children_count: 105
adapter: 25
merge: 6
finetune: 33
quantized: 69
__index_level_0__: 718,125
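
Fields such as `sha`, `likes`, `downloads`, `pipeline_tag`, and `siblings` mirror what the Hub API reports per repository. A sketch of fetching the same metadata live (assumes `huggingface_hub` is installed):

```python
# Sketch: pulling one row's worth of metadata straight from the Hub API.
from huggingface_hub import HfApi

info = HfApi().model_info("google/gemma-2-9b-it")
print(info.sha, info.likes, info.downloads, info.pipeline_tag)
print([s.rfilename for s in info.siblings][:5])  # file list, as in `siblings`
```
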
_id: 653a4cbd66081db6fc588b21
id: HuggingFaceH4/zephyr-7b-beta
author: HuggingFaceH4
gated: False
inference: warm
lastModified: 2024-09-23T13:33:11.000Z
likes: 1,578
private: false
sha: 20e1a5880bb00a7571542fe3fe6cb2dcb4816eee
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}", "eos_token": "</s>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": true}}
downloads: 749,121
tags:
[ "transformers", "pytorch", "safetensors", "mistral", "text-generation", "generated_from_trainer", "conversational", "en", "dataset:HuggingFaceH4/ultrachat_200k", "dataset:HuggingFaceH4/ultrafeedback_binarized", "arxiv:2305.18290", "arxiv:2310.16944", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:mit", "model-index", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-10-26T11:25:49.000Z
modelId: HuggingFaceH4/zephyr-7b-beta
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "all_results.json" }, { "rfilename": "config.json" }, { "rfilename": "eval_results.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00008.safetensors" }, { "rfilename": "model-00002-of-00008.safetensors" }, { "rfilename": "model-00003-of-00008.safetensors" }, { "rfilename": "model-00004-of-00008.safetensors" }, { "rfilename": "model-00005-of-00008.safetensors" }, { "rfilename": "model-00006-of-00008.safetensors" }, { "rfilename": "model-00007-of-00008.safetensors" }, { "rfilename": "model-00008-of-00008.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "pytorch_model-00001-of-00008.bin" }, { "rfilename": "pytorch_model-00002-of-00008.bin" }, { "rfilename": "pytorch_model-00003-of-00008.bin" }, { "rfilename": "pytorch_model-00004-of-00008.bin" }, { "rfilename": "pytorch_model-00005-of-00008.bin" }, { "rfilename": "pytorch_model-00006-of-00008.bin" }, { "rfilename": "pytorch_model-00007-of-00008.bin" }, { "rfilename": "pytorch_model-00008-of-00008.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "train_results.json" }, { "rfilename": "trainer_state.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-v0.1
base_model_downloads: 2,038
base_model_pipeline: text-generation
base_model_children_count: 2,038
adapter: 274
merge: 76
finetune: 120
quantized: 20
__index_level_0__: 357,190

_id: 657607b0a90ae2daaef65917
id: mistralai/Mixtral-8x7B-Instruct-v0.1
author: mistralai
gated: auto
inference: warm
lastModified: 2024-08-19T13:18:42.000Z
likes: 4,130
private: false
sha: 41bd4c9e7e4fb318ca40e721131d4933966c2cc1
config:
{"architectures": ["MixtralForCausalLM"], "model_type": "mixtral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set loop_messages = messages %}\n{%- endif %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n {%- if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}\n {{- raise_exception('After the optional system message, conversation roles must alternate user/assistant/user/assistant/...') }}\n {%- endif %}\n {%- if message['role'] == 'user' %}\n {%- if loop.first and system_message is defined %}\n {{- ' [INST] ' + system_message + '\\n\\n' + message['content'] + ' [/INST]' }}\n {%- else %}\n {{- ' [INST] ' + message['content'] + ' [/INST]' }}\n {%- endif %}\n {%- elif message['role'] == 'assistant' %}\n {{- ' ' + message['content'] + eos_token}}\n {%- else %}\n {{- raise_exception('Only user and assistant roles are supported, with the exception of an initial optional system message!') }}\n {%- endif %}\n{%- endfor %}\n", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 578,188
tags:
[ "transformers", "safetensors", "mixtral", "text-generation", "conversational", "fr", "it", "de", "es", "en", "base_model:mistralai/Mixtral-8x7B-v0.1", "base_model:finetune:mistralai/Mixtral-8x7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-12-10T18:47:12.000Z
modelId: mistralai/Mixtral-8x7B-Instruct-v0.1
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "consolidated.00.pt" }, { "rfilename": "consolidated.01.pt" }, { "rfilename": "consolidated.02.pt" }, { "rfilename": "consolidated.03.pt" }, { "rfilename": "consolidated.04.pt" }, { "rfilename": "consolidated.05.pt" }, { "rfilename": "consolidated.06.pt" }, { "rfilename": "consolidated.07.pt" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00019.safetensors" }, { "rfilename": "model-00002-of-00019.safetensors" }, { "rfilename": "model-00003-of-00019.safetensors" }, { "rfilename": "model-00004-of-00019.safetensors" }, { "rfilename": "model-00005-of-00019.safetensors" }, { "rfilename": "model-00006-of-00019.safetensors" }, { "rfilename": "model-00007-of-00019.safetensors" }, { "rfilename": "model-00008-of-00019.safetensors" }, { "rfilename": "model-00009-of-00019.safetensors" }, { "rfilename": "model-00010-of-00019.safetensors" }, { "rfilename": "model-00011-of-00019.safetensors" }, { "rfilename": "model-00012-of-00019.safetensors" }, { "rfilename": "model-00013-of-00019.safetensors" }, { "rfilename": "model-00014-of-00019.safetensors" }, { "rfilename": "model-00015-of-00019.safetensors" }, { "rfilename": "model-00016-of-00019.safetensors" }, { "rfilename": "model-00017-of-00019.safetensors" }, { "rfilename": "model-00018-of-00019.safetensors" }, { "rfilename": "model-00019-of-00019.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mixtral
base_model: mistralai/Mixtral-8x7B-v0.1
base_model_downloads: 182
base_model_pipeline: text-generation
base_model_children_count: 182
adapter: 110
merge: 48
finetune: 33
quantized: 20
__index_level_0__: 413,206

_id: 664dc170474f2283fa5c8659
id: mistralai/Mistral-7B-Instruct-v0.3
author: mistralai
gated: auto
inference: warm
lastModified: 2024-08-21T12:18:25.000Z
likes: 998
private: false
sha: e0bc86c23ce5aae1db576c8cca6f06f1f73af2db
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{%- if messages[0][\"role\"] == \"system\" %}\n {%- set system_message = messages[0][\"content\"] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set loop_messages = messages %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n{%- set user_messages = loop_messages | selectattr(\"role\", \"equalto\", \"user\") | list %}\n\n{#- This block checks for alternating user/assistant messages, skipping tool calling messages #}\n{%- set ns = namespace() %}\n{%- set ns.index = 0 %}\n{%- for message in loop_messages %}\n {%- if not (message.role == \"tool\" or message.role == \"tool_results\" or (message.tool_calls is defined and message.tool_calls is not none)) %}\n {%- if (message[\"role\"] == \"user\") != (ns.index % 2 == 0) %}\n {{- raise_exception(\"After the optional system message, conversation roles must alternate user/assistant/user/assistant/...\") }}\n {%- endif %}\n {%- set ns.index = ns.index + 1 %}\n {%- endif %}\n{%- endfor %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n {%- if message[\"role\"] == \"user\" %}\n {%- if tools is not none and (message == user_messages[-1]) %}\n {{- \"[AVAILABLE_TOOLS] [\" }}\n {%- for tool in tools %}\n {%- set tool = tool.function %}\n {{- '{\"type\": \"function\", \"function\": {' }}\n {%- for key, val in tool.items() if key != \"return\" %}\n {%- if val is string %}\n {{- '\"' + key + '\": \"' + val + '\"' }}\n {%- else %}\n {{- '\"' + key + '\": ' + val|tojson }}\n {%- endif %}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \"}}\" }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- else %}\n {{- \"]\" }}\n {%- endif %}\n {%- endfor %}\n {{- \"[/AVAILABLE_TOOLS]\" }}\n {%- endif %}\n {%- if loop.last and system_message is defined %}\n {{- \"[INST] \" + system_message + \"\\n\\n\" + message[\"content\"] + \"[/INST]\" }}\n {%- else %}\n {{- \"[INST] \" + message[\"content\"] + \"[/INST]\" }}\n {%- endif %}\n {%- elif message.tool_calls is defined and message.tool_calls is not none %}\n {{- \"[TOOL_CALLS] [\" }}\n {%- for tool_call in message.tool_calls %}\n {%- set out = tool_call.function|tojson %}\n {{- out[:-1] }}\n {%- if not tool_call.id is defined or tool_call.id|length != 9 %}\n {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n {%- endif %}\n {{- ', \"id\": \"' + tool_call.id + '\"}' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- else %}\n {{- \"]\" + eos_token }}\n {%- endif %}\n {%- endfor %}\n {%- elif message[\"role\"] == \"assistant\" %}\n {{- \" \" + message[\"content\"]|trim + eos_token}}\n {%- elif message[\"role\"] == \"tool_results\" or message[\"role\"] == \"tool\" %}\n {%- if message.content is defined and message.content.content is defined %}\n {%- set content = message.content.content %}\n {%- else %}\n {%- set content = message.content %}\n {%- endif %}\n {{- '[TOOL_RESULTS] {\"content\": ' + content|string + \", \" }}\n {%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 %}\n {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n {%- endif %}\n {{- '\"call_id\": \"' + message.tool_call_id + '\"}[/TOOL_RESULTS]' }}\n {%- else %}\n {{- raise_exception(\"Only user and assistant roles are supported, with the exception of an initial optional system message!\") }}\n {%- endif %}\n{%- endfor %}\n", "eos_token": 
"</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 564,853
tags:
[ "transformers", "safetensors", "mistral", "text-generation", "conversational", "base_model:mistralai/Mistral-7B-v0.3", "base_model:finetune:mistralai/Mistral-7B-v0.3", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-05-22T09:57:04.000Z
modelId: mistralai/Mistral-7B-Instruct-v0.3
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "consolidated.safetensors" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00003.safetensors" }, { "rfilename": "model-00002-of-00003.safetensors" }, { "rfilename": "model-00003-of-00003.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "params.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer.model.v3" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-v0.3
base_model_downloads: 338
base_model_pipeline: text-generation
base_model_children_count: 338
adapter: 181
merge: 10
finetune: 51
quantized: 87
__index_level_0__: 652,648

_id: 65143cd8e31c0e2e3df713e5
id: mistralai/Mistral-7B-Instruct-v0.1
author: mistralai
gated: auto
inference: warm
lastModified: 2024-08-22T11:33:25.000Z
likes: 1,506
private: false
sha: 2dcff66eac0c01dc50e4c41eea959968232187fe
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set loop_messages = messages %}\n{%- endif %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n {%- if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}\n {{- raise_exception('After the optional system message, conversation roles must alternate user/assistant/user/assistant/...') }}\n {%- endif %}\n {%- if message['role'] == 'user' %}\n {%- if loop.first and system_message is defined %}\n {{- ' [INST] ' + system_message + '\\n\\n' + message['content'] + ' [/INST]' }}\n {%- else %}\n {{- ' [INST] ' + message['content'] + ' [/INST]' }}\n {%- endif %}\n {%- elif message['role'] == 'assistant' %}\n {{- ' ' + message['content'] + eos_token}}\n {%- else %}\n {{- raise_exception('Only user and assistant roles are supported, with the exception of an initial optional system message!') }}\n {%- endif %}\n{%- endfor %}\n", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 562,402
tags:
[ "transformers", "pytorch", "safetensors", "mistral", "text-generation", "finetuned", "conversational", "arxiv:2310.06825", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-09-27T14:31:52.000Z
modelId: mistralai/Mistral-7B-Instruct-v0.1
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "pytorch_model-00001-of-00002.bin" }, { "rfilename": "pytorch_model-00002-of-00002.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer.model.v1" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-v0.1
base_model_downloads: 2,038
base_model_pipeline: text-generation
base_model_children_count: 2,038
adapter: 340
merge: 41
finetune: 139
quantized: 14
__index_level_0__: 326,808

_id: 665ee74789b4fd787a568664
id: Qwen/Qwen2-7B-Instruct
author: Qwen
gated: False
inference: cold
lastModified: 2024-08-21T10:29:04.000Z
likes: 573
private: false
sha: f2826a00ceef68f0f2b946d945ecc0477ce4450c
config:
{"architectures": ["Qwen2ForCausalLM"], "model_type": "qwen2", "tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "<|endoftext|>", "unk_token": null}}
downloads: 410,195
tags:
[ "transformers", "safetensors", "qwen2", "text-generation", "chat", "conversational", "en", "arxiv:2309.00071", "base_model:Qwen/Qwen2-7B", "base_model:finetune:Qwen/Qwen2-7B", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-06-04T10:07:03.000Z
modelId: Qwen/Qwen2-7B-Instruct
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "merges.txt" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "vocab.json" } ]
model_type: qwen2
base_model: Qwen/Qwen2-7B
base_model_downloads: 266
base_model_pipeline: text-generation
base_model_children_count: 266
adapter: 49
merge: 3
finetune: 42
quantized: 69
__index_level_0__: 680,671

_id: 669650bb11dbbf600cf4dcf0
id: google/gemma-2-2b-it
author: google
gated: manual
inference: warm
lastModified: 2024-08-27T19:41:44.000Z
likes: 570
private: false
sha: 299a8560bedf22ed1c72a8a11e7dce4a7f9f51f8
config:
{"architectures": ["Gemma2ForCausalLM"], "model_type": "gemma2", "tokenizer_config": {"bos_token": "<bos>", "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + message['content'] | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}", "eos_token": "<eos>", "pad_token": "<pad>", "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 358,990
tags:
[ "transformers", "safetensors", "gemma2", "text-generation", "conversational", "arxiv:2009.03300", "arxiv:1905.07830", "arxiv:1911.11641", "arxiv:1904.09728", "arxiv:1905.10044", "arxiv:1907.10641", "arxiv:1811.00937", "arxiv:1809.02789", "arxiv:1911.01547", "arxiv:1705.03551", "arxiv:2107.03374", "arxiv:2108.07732", "arxiv:2110.14168", "arxiv:2009.11462", "arxiv:2101.11718", "arxiv:2110.08193", "arxiv:1804.09301", "arxiv:2109.07958", "arxiv:1804.06876", "arxiv:2103.03874", "arxiv:2304.06364", "arxiv:1903.00161", "arxiv:2206.04615", "arxiv:2203.09509", "arxiv:2403.13793", "base_model:google/gemma-2-2b", "base_model:finetune:google/gemma-2-2b", "license:gemma", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-07-16T10:51:39.000Z
modelId: google/gemma-2-2b-it
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: gemma2
base_model: google/gemma-2-2b
base_model_downloads: 256
base_model_pipeline: text-generation
base_model_children_count: 256
adapter: 120
merge: 3
finetune: 74
quantized: 77
__index_level_0__: 755,923

_id: 664e5306762e0826aea132d0
id: thesven/Mistral-7B-Instruct-v0.3-GPTQ
author: thesven
gated: False
inference: cold
lastModified: 2024-09-11T17:17:42.000Z
likes: 0
private: false
sha: 2d24a7f41b3d93fba3aef0cde15a77f09e99437c
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 345,189
tags:
[ "transformers", "safetensors", "mistral", "text-generation", "conversational", "base_model:mistralai/Mistral-7B-Instruct-v0.3", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.3", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "gptq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-05-22T20:18:14.000Z
modelId: thesven/Mistral-7B-Instruct-v0.3-GPTQ
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-Instruct-v0.3
base_model_downloads: 320
base_model_pipeline: text-generation
base_model_children_count: 320
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 653,744
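
Rows like the one above carry a `quantization_config` ({"bits": 4, "quant_method": "gptq"}) inside `config`. A sketch of loading such a checkpoint (assumes a CUDA machine with `transformers` plus GPTQ kernels, e.g. `optimum` with `auto-gptq`, installed; `transformers` reads the quantization settings from the repo's own config.json):

```python
# Sketch: loading a 4-bit GPTQ checkpoint such as the row above.
# Assumes a CUDA GPU and GPTQ kernels (e.g. optimum + auto-gptq) installed.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "thesven/Mistral-7B-Instruct-v0.3-GPTQ"
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, device_map="auto")

inputs = tok("[INST] Hello! [/INST]", return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=32)
print(tok.decode(out[0], skip_special_tokens=True))
```
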
_id: 6515ff5b7f18cec973af0828
id: TheBloke/Mistral-7B-Instruct-v0.1-GPTQ
author: TheBloke
gated: False
inference: explicit-opt-out
lastModified: 2023-09-29T20:48:48.000Z
likes: 75
private: false
sha: 6ae1e4ae2cfbaf107c705ed722ec243b4f88014d
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": true}}
downloads: 296,813
tags:
[ "transformers", "safetensors", "mistral", "text-generation", "finetuned", "conversational", "base_model:mistralai/Mistral-7B-Instruct-v0.1", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-09-28T22:34:03.000Z
modelId: TheBloke/Mistral-7B-Instruct-v0.1-GPTQ
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-Instruct-v0.1
base_model_downloads: 500
base_model_pipeline: text-generation
base_model_children_count: 500
adapter: 191
merge: 0
finetune: 92
quantized: 0
__index_level_0__: 328,345

_id: 65cac0d27faf059c56a5821f
id: google/gemma-7b-it
author: google
gated: manual
inference: cold
lastModified: 2024-08-14T08:36:20.000Z
likes: 1,128
private: false
sha: 9c5798d27f588501ce1e108079d2a19e4c3a2353
config:
{"architectures": ["GemmaForCausalLM"], "model_type": "gemma", "tokenizer_config": {"bos_token": "<bos>", "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + message['content'] | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}", "eos_token": "<eos>", "pad_token": "<pad>", "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 273,413
tags:
[ "transformers", "safetensors", "gguf", "gemma", "text-generation", "conversational", "arxiv:2312.11805", "arxiv:2009.03300", "arxiv:1905.07830", "arxiv:1911.11641", "arxiv:1904.09728", "arxiv:1905.10044", "arxiv:1907.10641", "arxiv:1811.00937", "arxiv:1809.02789", "arxiv:1911.01547", "arxiv:1705.03551", "arxiv:2107.03374", "arxiv:2108.07732", "arxiv:2110.14168", "arxiv:2304.06364", "arxiv:2206.04615", "arxiv:1804.06876", "arxiv:2110.08193", "arxiv:2009.11462", "arxiv:2101.11718", "arxiv:1804.09301", "arxiv:2109.07958", "arxiv:2203.09509", "base_model:google/gemma-7b", "base_model:finetune:google/gemma-7b", "license:gemma", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-02-13T01:07:30.000Z
modelId: google/gemma-7b-it
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "gemma-7b-it.gguf" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: gemma
base_model: google/gemma-7b
base_model_downloads: 9,278
base_model_pipeline: text-generation
base_model_children_count: 9,278
adapter: 74
merge: 0
finetune: 16
quantized: 10
__index_level_0__: 490,051

_id: 665d877e957df09a0751515e
id: Qwen/Qwen2-0.5B-Instruct
author: Qwen
gated: False
inference: cold
lastModified: 2024-08-21T10:23:36.000Z
likes: 155
private: false
sha: c540970f9e29518b1d8f06ab8b24cba66ad77b6d
config:
{"architectures": ["Qwen2ForCausalLM"], "model_type": "qwen2", "tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "<|endoftext|>", "unk_token": null}}
downloads: 270,072
tags:
[ "transformers", "safetensors", "qwen2", "text-generation", "chat", "conversational", "en", "base_model:Qwen/Qwen2-0.5B", "base_model:finetune:Qwen/Qwen2-0.5B", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-06-03T09:06:06.000Z
modelId: Qwen/Qwen2-0.5B-Instruct
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "merges.txt" }, { "rfilename": "model.safetensors" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "vocab.json" } ]
model_type: qwen2
base_model: Qwen/Qwen2-0.5B
base_model_downloads: 120
base_model_pipeline: text-generation
base_model_children_count: 120
adapter: 162
merge: 4
finetune: 35
quantized: 47
__index_level_0__: 678,240

_id: 669fe28bb3b73c95ecd1b6cd
id: SanctumAI/Meta-Llama-3.1-8B-Instruct-GGUF
author: SanctumAI
gated: False
inference: cold
lastModified: 2024-09-11T17:15:23.000Z
likes: 10
private: false
sha: 1cbd69377fb06c0a4719187fc31fdc7d4020ec3a
config:
{"model_type": "llama"}
downloads: 269,192
tags:
[ "transformers", "gguf", "llama", "facebook", "meta", "pytorch", "llama-3", "text-generation", "en", "de", "fr", "it", "pt", "hi", "es", "th", "base_model:meta-llama/Llama-3.1-8B-Instruct", "base_model:quantized:meta-llama/Llama-3.1-8B-Instruct", "license:llama3.1", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-07-23T17:04:11.000Z
modelId: SanctumAI/Meta-Llama-3.1-8B-Instruct-GGUF
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q2_K.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q3_K_L.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q3_K_M.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q3_K_S.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q4_0.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q4_1.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q4_K.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q4_K_M.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q4_K_S.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q5_0.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q5_1.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q5_K.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q5_K_M.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q5_K_S.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q6_K.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.Q8_0.gguf" }, { "rfilename": "meta-llama-3.1-8b-instruct.f16.gguf" } ]
model_type: llama
base_model: meta-llama/Llama-3.1-8B-Instruct
base_model_downloads: 695
base_model_pipeline: text-generation
base_model_children_count: 695
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 772,940
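
GGUF repositories like the one above publish one file per quantization level in `siblings`. A sketch of fetching a single variant instead of cloning the whole repo (assumes `huggingface_hub` is installed; the filename is taken verbatim from the row's `siblings` list):

```python
# Sketch: downloading one quantization level from the GGUF repo above.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="SanctumAI/Meta-Llama-3.1-8B-Instruct-GGUF",
    filename="meta-llama-3.1-8b-instruct.Q4_K_M.gguf",  # one `siblings` entry
)
print(path)  # local cache path, usable with llama.cpp-compatible runtimes
```
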
_id: 65778ac662d3ac1817cc9201
id: TheBloke/Mistral-7B-Instruct-v0.2-GGUF
author: TheBloke
gated: False
inference: explicit-opt-out
lastModified: 2023-12-11T22:23:10.000Z
likes: 383
private: false
sha: 3a6fbf4a41a1d52e415a4958cde6856d34b2db93
config:
{"model_type": "mistral"}
downloads: 212,551
tags:
[ "transformers", "gguf", "mistral", "finetuned", "text-generation", "arxiv:2310.06825", "base_model:mistralai/Mistral-7B-Instruct-v0.2", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.2", "license:apache-2.0", "text-generation-inference", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-12-11T22:18:46.000Z
modelId: TheBloke/Mistral-7B-Instruct-v0.2-GGUF
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "mistral-7b-instruct-v0.2.Q2_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q3_K_L.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q3_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q3_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q4_0.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q4_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q4_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q5_0.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q5_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q5_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q6_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.2.Q8_0.gguf" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-Instruct-v0.2
base_model_downloads: 1,321
base_model_pipeline: text-generation
base_model_children_count: 1,321
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 414,766

_id: 664e8ee4437ce2c3146cb273
id: neuralmagic/Mistral-7B-Instruct-v0.3-GPTQ-4bit
author: neuralmagic
gated: False
inference: cold
lastModified: 2024-06-10T20:59:32.000Z
likes: 11
private: false
sha: ed07c8f1d2c87648508d9e153199d9b761cad5a8
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false}}
downloads: 186,770
tags:
[ "transformers", "safetensors", "mistral", "text-generation", "conversational", "base_model:mistralai/Mistral-7B-Instruct-v0.3", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.3", "license:apache-2.0", "model-index", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "gptq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-05-23T00:33:40.000Z
modelId: neuralmagic/Mistral-7B-Instruct-v0.3-GPTQ-4bit
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-Instruct-v0.3
base_model_downloads: 320
base_model_pipeline: text-generation
base_model_children_count: 320
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 653,995

_id: 666154bbf57dbfbdc3696bd2
id: Qwen/Qwen2-7B-Instruct-AWQ
author: Qwen
gated: False
inference: cold
lastModified: 2024-08-21T10:30:36.000Z
likes: 19
private: false
sha: f7621c98023fa293e743c692981a3c8f5b564b86
config:
{"architectures": ["Qwen2ForCausalLM"], "model_type": "qwen2", "quantization_config": {"bits": 4, "quant_method": "awq"}, "tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "<|endoftext|>", "unk_token": null}}
downloads: 184,675
tags:
[ "transformers", "safetensors", "qwen2", "text-generation", "chat", "conversational", "en", "arxiv:2309.00071", "base_model:Qwen/Qwen2-7B-Instruct", "base_model:quantized:Qwen/Qwen2-7B-Instruct", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "awq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-06-06T06:18:35.000Z
modelId: Qwen/Qwen2-7B-Instruct-AWQ
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "merges.txt" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "vocab.json" } ]
model_type: qwen2
base_model: Qwen/Qwen2-7B-Instruct
base_model_downloads: 161
base_model_pipeline: text-generation
base_model_children_count: 161
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 685,154

_id: 65148287463c4fd767f76cb6
id: TheBloke/Mistral-7B-Instruct-v0.1-AWQ
author: TheBloke
gated: False
inference: explicit-opt-out
lastModified: 2023-11-09T18:17:58.000Z
likes: 35
private: false
sha: b2f7c152209c12057c3a0d77b2c01a1def7d594f
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"quant_method": "awq", "bits": 4}, "tokenizer_config": {"bos_token": "<s>", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": true}}
downloads: 178,358
tags:
[ "transformers", "safetensors", "mistral", "text-generation", "finetuned", "base_model:mistralai/Mistral-7B-Instruct-v0.1", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "awq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-09-27T19:29:11.000Z
modelId: TheBloke/Mistral-7B-Instruct-v0.1-AWQ
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quant_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-Instruct-v0.1
base_model_downloads: 500
base_model_pipeline: text-generation
base_model_children_count: 500
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 327,065

_id: 669fbfba1631541594c89063
id: casperhansen/mistral-nemo-instruct-2407-awq
author: casperhansen
gated: False
inference: cold
lastModified: 2024-09-27T07:14:03.000Z
likes: 5
private: false
sha: c83b6438e13051ad1c0f5683635705ee83bb8772
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "awq"}, "tokenizer_config": {"bos_token": "<s>", "chat_template": "{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set loop_messages = messages %}\n{%- endif %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n {%- if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}\n {{- raise_exception('After the optional system message, conversation roles must alternate user/assistant/user/assistant/...') }}\n {%- endif %}\n {%- if message['role'] == 'user' %}\n {%- if loop.last and system_message is defined %}\n {{- '[INST] ' + system_message + '\\n\\n' + message['content'] + '[/INST]' }}\n {%- else %}\n {{- '[INST] ' + message['content'] + '[/INST]' }}\n {%- endif %}\n {%- elif message['role'] == 'assistant' %}\n {{- ' ' + message['content'] + eos_token}}\n {%- else %}\n {{- raise_exception('Only user and assistant roles are supported, with the exception of an initial optional system message!') }}\n {%- endif %}\n{%- endfor %}\n", "eos_token": "</s>", "unk_token": "<unk>"}}
downloads: 165,811
tags:
[ "transformers", "safetensors", "mistral", "text-generation", "conversational", "base_model:mistralai/Mistral-Nemo-Instruct-2407", "base_model:quantized:mistralai/Mistral-Nemo-Instruct-2407", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "awq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-07-23T14:35:38.000Z
modelId: casperhansen/mistral-nemo-instruct-2407-awq
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-Nemo-Instruct-2407
base_model_downloads: 101
base_model_pipeline: text-generation
base_model_children_count: 101
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 772,614

_id: 66231225c8920ec35160357e
id: aaditya/Llama3-OpenBioLLM-8B
author: aaditya
gated: False
inference: cold
lastModified: 2024-04-28T02:36:57.000Z
likes: 146
private: false
sha: 000c725dc3a680e35260b2c213163387581c974f
config:
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "eos_token": "<|end_of_text|>", "pad_token": "<|end_of_text|>"}}
downloads: 162,094
tags:
[ "transformers", "pytorch", "llama", "text-generation", "llama-3", "Mixtral", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "distillation", "en", "arxiv:2305.18290", "arxiv:2303.13375", "arxiv:2212.13138", "arxiv:2305.09617", "arxiv:2402.07023", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:finetune:meta-llama/Meta-Llama-3-8B", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-04-20T00:53:57.000Z
modelId: aaditya/Llama3-OpenBioLLM-8B
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "pytorch_model-00001-of-00004.bin" }, { "rfilename": "pytorch_model-00002-of-00004.bin" }, { "rfilename": "pytorch_model-00003-of-00004.bin" }, { "rfilename": "pytorch_model-00004-of-00004.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
model_type: llama
base_model: meta-llama/Meta-Llama-3-8B
base_model_downloads: 976
base_model_pipeline: text-generation
base_model_children_count: 976
adapter: 0
merge: 30
finetune: 8
quantized: 4
__index_level_0__: 594,742

_id: 6515ffbc3e5a12e0ceaefd75
id: TheBloke/Mistral-7B-v0.1-GPTQ
author: TheBloke
gated: False
inference: explicit-opt-out
lastModified: 2023-09-29T20:49:41.000Z
likes: 35
private: false
sha: 81de15eeac5938bc3b4065dfddf798fe5d215881
config:
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": true}}
downloads: 149,047
tags:
[ "transformers", "safetensors", "mistral", "text-generation", "pretrained", "base_model:mistralai/Mistral-7B-v0.1", "base_model:quantized:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-09-28T22:35:40.000Z
modelId: TheBloke/Mistral-7B-v0.1-GPTQ
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: mistral
base_model: mistralai/Mistral-7B-v0.1
base_model_downloads: 2,038
base_model_pipeline: text-generation
base_model_children_count: 2,038
adapter: 25
merge: 0
finetune: 5
quantized: 1
__index_level_0__: 328,348

_id: 66968801480e8b04bb69248c
id: SanctumAI/gemma-2-9b-it-GGUF
author: SanctumAI
gated: False
inference: cold
lastModified: 2024-09-11T17:17:18.000Z
likes: 2
private: false
sha: 5e3f967050720200fe63b2f8a62e3f6ac5c096ea
config:
{"model_type": "gemma2"}
downloads: 130,857
tags:
[ "transformers", "gguf", "gemma2", "conversational", "text-generation", "base_model:google/gemma-2-9b-it", "base_model:quantized:google/gemma-2-9b-it", "license:gemma", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-07-16T14:47:29.000Z
modelId: SanctumAI/gemma-2-9b-it-GGUF
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "gemma-2-9b-it.Q2_K.gguf" }, { "rfilename": "gemma-2-9b-it.Q3_K_L.gguf" }, { "rfilename": "gemma-2-9b-it.Q3_K_M.gguf" }, { "rfilename": "gemma-2-9b-it.Q3_K_S.gguf" }, { "rfilename": "gemma-2-9b-it.Q4_0.gguf" }, { "rfilename": "gemma-2-9b-it.Q4_1.gguf" }, { "rfilename": "gemma-2-9b-it.Q4_K.gguf" }, { "rfilename": "gemma-2-9b-it.Q4_K_M.gguf" }, { "rfilename": "gemma-2-9b-it.Q4_K_S.gguf" }, { "rfilename": "gemma-2-9b-it.Q5_0.gguf" }, { "rfilename": "gemma-2-9b-it.Q5_1.gguf" }, { "rfilename": "gemma-2-9b-it.Q5_K.gguf" }, { "rfilename": "gemma-2-9b-it.Q5_K_M.gguf" }, { "rfilename": "gemma-2-9b-it.Q5_K_S.gguf" }, { "rfilename": "gemma-2-9b-it.Q6_K.gguf" }, { "rfilename": "gemma-2-9b-it.Q8_0.gguf" }, { "rfilename": "gemma-2-9b-it.f16.gguf" } ]
model_type: gemma2
base_model: google/gemma-2-9b-it
base_model_downloads: 131
base_model_pipeline: text-generation
base_model_children_count: 131
adapter: 0
merge: 0
finetune: 0
quantized: 0
__index_level_0__: 756,278

_id: 64b6ce072edcb4a1aa820cbe
id: TheBloke/Llama-2-7B-Chat-GPTQ
author: TheBloke
gated: False
inference: explicit-opt-out
lastModified: 2023-09-27T12:44:48.000Z
likes: 258
private: false
sha: d5ad9310836dd91b6ac6133e2e47f47394386cea
config:
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}, "pad_token": null, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}}}
downloads: 110,199
tags:
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-2", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-7b-chat-hf", "base_model:quantized:meta-llama/Llama-2-7b-chat-hf", "license:llama2", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2023-07-18T17:38:15.000Z
modelId: TheBloke/Llama-2-7B-Chat-GPTQ
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
model_type: llama
base_model: meta-llama/Llama-2-7b-chat-hf
base_model_downloads: 1,328
base_model_pipeline: text-generation
base_model_children_count: 1,328
adapter: 13
merge: 0
finetune: 3
quantized: 0
__index_level_0__: 247,838

_id: 66303852b8d7220d89548ffc
id: NousResearch/Hermes-2-Pro-Llama-3-8B
author: NousResearch
gated: False
inference: cold
lastModified: 2024-09-14T16:29:41.000Z
likes: 400
private: false
sha: f798274b30e7d2d4797c369edcc0cc7473b6e6f2
config:
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": [{"name": "default", "template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"}, {"name": "tool_use", "template": "{%- macro json_to_python_type(json_spec) %}\n{%- set basic_type_map = {\n \"string\": \"str\",\n \"number\": \"float\",\n \"integer\": \"int\",\n \"boolean\": \"bool\"\n} %}\n\n{%- if basic_type_map[json_spec.type] is defined %}\n {{- basic_type_map[json_spec.type] }}\n{%- elif json_spec.type == \"array\" %}\n {{- \"list[\" + json_to_python_type(json_spec|items) + \"]\"}}\n{%- elif json_spec.type == \"object\" %}\n {%- if json_spec.additionalProperties is defined %}\n {{- \"dict[str, \" + json_to_python_type(json_spec.additionalProperties) + ']'}}\n {%- else %}\n {{- \"dict\" }}\n {%- endif %}\n{%- elif json_spec.type is iterable %}\n {{- \"Union[\" }}\n {%- for t in json_spec.type %}\n {{- json_to_python_type({\"type\": t}) }}\n {%- if not loop.last %}\n {{- \",\" }} \n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n{%- else %}\n {{- \"Any\" }}\n{%- endif %}\n{%- endmacro %}\n\n\n{{- bos_token }}\n{{- '<|im_start|>system\n' }}\n{{- \"You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools: <tools> \" }}\n{%- for tool in tools %}\n {%- if tool.function is defined %}\n {%- set tool = tool.function %}\n {%- endif %}\n {{- '{\"type\": \"function\", \"function\": ' }}\n {{- '{\"name\": \"' + tool.name + '\", ' }}\n {{- '\"description\": \"' + tool.name + '(' }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {{- param_name + \": \" + json_to_python_type(param_fields) }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- if tool.return is defined %}\n {{- \" -> \" + json_to_python_type(tool.return) }}\n {%- endif %}\n {{- \" - \" + tool.description + \"\n\n\" }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {%- if loop.first %}\n {{- \" Args:\n\" }}\n {%- endif %}\n {{- \" \" + param_name + \"(\" + json_to_python_type(param_fields) + \"): \" + param_fields.description|trim }}\n {%- endfor %}\n {%- if tool.return is defined and tool.return.description is defined %}\n {{- \"\n Returns:\n \" + tool.return.description }}\n {%- endif %}\n {{- '\"' }}\n {{- ', \"parameters\": ' }}\n {%- if tool.parameters.properties | length == 0 %}\n {{- \"{}\" }}\n {%- else %}\n {{- tool.parameters|tojson }}\n {%- endif %}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \"\n\" }}\n {%- endif %}\n{%- endfor %}\n{{- \" </tools>\" }}\n{{- 'Use the following pydantic model json schema for each tool call you will make: {\"properties\": {\"name\": {\"title\": \"Name\", \"type\": \"string\"}, \"arguments\": {\"title\": \"Arguments\", \"type\": \"object\"}}, \"required\": [\"name\", \"arguments\"], \"title\": \"FunctionCall\", \"type\": \"object\"}}\n' }}\n{{- \"For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:\n\" }}\n{{- \"<tool_call>\n\" }}\n{{- '{\"name\": <function-name>, \"arguments\": <args-dict>}\n' }}\n{{- 
'</tool_call><|im_end|>\n' }}\n{%- for message in messages %}\n {%- if message.role == \"user\" or message.role == \"system\" or (message.role == \"assistant\" and message.tool_calls is not defined) %}\n {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- for tool_call in message.tool_calls %}\n {{- '\n<tool_call>\n' }} {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '{' }}\n {{- '\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\"' }}\n {{- ', '}}\n {%- if tool_call.arguments is defined %}\n {{- '\"arguments\": ' }}\n {%- if tool_call.arguments is string %}\n {{- tool_call.arguments }}\n {%- else %}\n {{- tool_call.arguments|tojson }}\n {%- endif %}\n {%- endif %}\n {{- '}' }}\n {{- '\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if loop.previtem and loop.previtem.role != \"tool\" %}\n {{- '<|im_start|>tool\n' }}\n {%- endif %}\n {{- '<tool_response>\n' }}\n {{- message.content }}\n {%- if not loop.last %}\n {{- '\n</tool_response>\n' }}\n {%- else %}\n {{- '\n</tool_response>' }}\n {%- endif %}\n {%- if not loop.last and loop.nextitem.role != \"tool\" %}\n {{- '<|im_end|>' }}\n {%- elif loop.last %}\n {{- '<|im_end|>' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\n' }}\n{%- endif %}\n"}], "eos_token": "<|im_end|>", "pad_token": "<|end_of_text|>"}}
downloads: 97,818
tags:
[ "transformers", "safetensors", "llama", "text-generation", "Llama-3", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "synthetic data", "distillation", "function calling", "json mode", "axolotl", "conversational", "en", "dataset:teknium/OpenHermes-2.5", "base_model:NousResearch/Meta-Llama-3-8B", "base_model:finetune:NousResearch/Meta-Llama-3-8B", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
pipeline_tag: text-generation
library_name: transformers
createdAt: 2024-04-30T00:16:18.000Z
modelId: NousResearch/Hermes-2-Pro-Llama-3-8B
siblings:
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "dpo-adapter/adapter_config.json" }, { "rfilename": "dpo-adapter/adapter_model.safetensors" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
NousResearch/Meta-Llama-3-8B
101
text-generation
101
2
32
73
35
612,280
662a2d8b084d6b7ab42d8a15
MLP-KTLim/llama-3-Korean-Bllossom-8B
MLP-KTLim
False
cold
2024-08-12T04:01:02.000Z
258
false
10acb1aa4f341f2d3c899d78c520b0822a909b95
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}", "eos_token": "<|eot_id|>", "pad_token": "<|end_of_text|>"}}
85,510
[ "transformers", "safetensors", "llama", "text-generation", "conversational", "en", "ko", "arxiv:2403.10882", "arxiv:2403.11399", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:finetune:meta-llama/Meta-Llama-3-8B", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-04-25T10:16:43.000Z
MLP-KTLim/llama-3-Korean-Bllossom-8B
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Meta-Llama-3-8B
976
text-generation
976
13
13
22
28
604,596
64f60811b8cc49b414fe5cdf
TheBloke/Llama-2-7B-Chat-GGUF
TheBloke
False
explicit-opt-out
2023-10-14T21:36:33.000Z
426
false
191239b3e26b2882fb562ffccdd1cf0f65402adb
{"model_type": "llama"}
79,952
[ "transformers", "gguf", "llama", "facebook", "meta", "pytorch", "llama-2", "text-generation", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-7b-chat-hf", "base_model:quantized:meta-llama/Llama-2-7b-chat-hf", "license:llama2", "text-generation-inference", "region:us" ]
text-generation
transformers
2023-09-04T16:38:41.000Z
TheBloke/Llama-2-7B-Chat-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE.txt" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "llama-2-7b-chat.Q2_K.gguf" }, { "rfilename": "llama-2-7b-chat.Q3_K_L.gguf" }, { "rfilename": "llama-2-7b-chat.Q3_K_M.gguf" }, { "rfilename": "llama-2-7b-chat.Q3_K_S.gguf" }, { "rfilename": "llama-2-7b-chat.Q4_0.gguf" }, { "rfilename": "llama-2-7b-chat.Q4_K_M.gguf" }, { "rfilename": "llama-2-7b-chat.Q4_K_S.gguf" }, { "rfilename": "llama-2-7b-chat.Q5_0.gguf" }, { "rfilename": "llama-2-7b-chat.Q5_K_M.gguf" }, { "rfilename": "llama-2-7b-chat.Q5_K_S.gguf" }, { "rfilename": "llama-2-7b-chat.Q6_K.gguf" }, { "rfilename": "llama-2-7b-chat.Q8_0.gguf" } ]
llama
meta-llama/Llama-2-7b-chat-hf
1,328
text-generation
1,328
0
0
0
0
303,198
657759d1da3f14fa5ddeba81
TheBloke/Mixtral-8x7B-Instruct-v0.1-GPTQ
TheBloke
False
explicit-opt-out
2023-12-14T14:30:44.000Z
130
false
0f81ba4680ccd2bce163334b93305d40b9e27b09
{"architectures": ["MixtralForCausalLM"], "model_type": "mixtral", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false, "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}"}}
70,053
[ "transformers", "safetensors", "mixtral", "text-generation", "conversational", "fr", "it", "de", "es", "en", "base_model:mistralai/Mixtral-8x7B-Instruct-v0.1", "base_model:quantized:mistralai/Mixtral-8x7B-Instruct-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2023-12-11T18:49:53.000Z
TheBloke/Mixtral-8x7B-Instruct-v0.1-GPTQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mixtral
mistralai/Mixtral-8x7B-Instruct-v0.1
166
text-generation
166
0
0
0
0
414,590
664f44644f17b67fdf917d9d
SanctumAI/Mistral-7B-Instruct-v0.3-GGUF
SanctumAI
False
cold
2024-09-15T11:33:21.000Z
3
false
ab77e5f8958a1e3b612b805f89f4695e8451950a
{"model_type": "mistral"}
67,526
[ "transformers", "gguf", "mistral", "text-generation", "base_model:mistralai/Mistral-7B-Instruct-v0.3", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.3", "license:apache-2.0", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-05-23T13:28:04.000Z
SanctumAI/Mistral-7B-Instruct-v0.3-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "mistral-7b-instruct-v0.3.Q2_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q3_K_L.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q3_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q3_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q4_0.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q4_1.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q4_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q4_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q4_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q5_0.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q5_1.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q5_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q5_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q5_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q6_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.Q8_0.gguf" }, { "rfilename": "mistral-7b-instruct-v0.3.f16.gguf" } ]
mistral
mistralai/Mistral-7B-Instruct-v0.3
320
text-generation
320
0
0
0
0
654,981
6591dd37a02954c9825f00f1
TheBloke/TinyLlama-1.1B-Chat-v1.0-GPTQ
TheBloke
False
explicit-opt-out
2023-12-31T21:33:52.000Z
12
false
9d4580af0f21bccafd762dcc50d0c7bac6273584
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}", "eos_token": "</s>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": false}}
67,033
[ "transformers", "safetensors", "llama", "text-generation", "conversational", "en", "dataset:cerebras/SlimPajama-627B", "dataset:bigcode/starcoderdata", "dataset:OpenAssistant/oasst_top1_2023-08-25", "base_model:TinyLlama/TinyLlama-1.1B-Chat-v1.0", "base_model:quantized:TinyLlama/TinyLlama-1.1B-Chat-v1.0", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2023-12-31T21:29:27.000Z
TheBloke/TinyLlama-1.1B-Chat-v1.0-GPTQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "eval_results.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
TinyLlama/TinyLlama-1.1B-Chat-v1.0
687
text-generation
687
0
0
1
0
436,937
66cbcec8e0cdc14fc0b45840
smeby/Qwen-Qwen1.5-7B-1724632776
smeby
False
pipeline-not-detected
2024-08-26T00:39:52.000Z
0
false
45923cca577d6ac53d6b9c212c8c6e07ebd65d97
{"tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|endoftext|>", "pad_token": "<|endoftext|>", "unk_token": null}, "peft": {"base_model_name_or_path": "Qwen/Qwen1.5-7B", "task_type": "CAUSAL_LM"}}
49,744
[ "peft", "safetensors", "arxiv:1910.09700", "base_model:Qwen/Qwen1.5-7B", "base_model:adapter:Qwen/Qwen1.5-7B", "region:us" ]
null
peft
2024-08-26T00:39:36.000Z
smeby/Qwen-Qwen1.5-7B-1724632776
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "adapter_config.json" }, { "rfilename": "adapter_model.safetensors" }, { "rfilename": "added_tokens.json" }, { "rfilename": "checkpoint-31/README.md" }, { "rfilename": "checkpoint-31/adapter_config.json" }, { "rfilename": "checkpoint-31/adapter_model.safetensors" }, { "rfilename": "checkpoint-31/added_tokens.json" }, { "rfilename": "checkpoint-31/merges.txt" }, { "rfilename": "checkpoint-31/optimizer.pt" }, { "rfilename": "checkpoint-31/rng_state.pth" }, { "rfilename": "checkpoint-31/scheduler.pt" }, { "rfilename": "checkpoint-31/special_tokens_map.json" }, { "rfilename": "checkpoint-31/tokenizer.json" }, { "rfilename": "checkpoint-31/tokenizer_config.json" }, { "rfilename": "checkpoint-31/trainer_state.json" }, { "rfilename": "checkpoint-31/training_args.bin" }, { "rfilename": "checkpoint-31/vocab.json" }, { "rfilename": "checkpoint-62/README.md" }, { "rfilename": "checkpoint-62/adapter_config.json" }, { "rfilename": "checkpoint-62/adapter_model.safetensors" }, { "rfilename": "checkpoint-62/added_tokens.json" }, { "rfilename": "checkpoint-62/merges.txt" }, { "rfilename": "checkpoint-62/optimizer.pt" }, { "rfilename": "checkpoint-62/rng_state.pth" }, { "rfilename": "checkpoint-62/scheduler.pt" }, { "rfilename": "checkpoint-62/special_tokens_map.json" }, { "rfilename": "checkpoint-62/tokenizer.json" }, { "rfilename": "checkpoint-62/tokenizer_config.json" }, { "rfilename": "checkpoint-62/trainer_state.json" }, { "rfilename": "checkpoint-62/training_args.bin" }, { "rfilename": "checkpoint-62/vocab.json" }, { "rfilename": "merges.txt" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "training_args.bin" }, { "rfilename": "vocab.json" } ]
null
Qwen/Qwen1.5-7B
6,515
text-generation
6,515
0
0
0
0
860,315
65778ac63ceeb2f078e6c864
TheBloke/Mistral-7B-Instruct-v0.2-GPTQ
TheBloke
False
explicit-opt-out
2023-12-11T22:46:53.000Z
48
false
7532d6bc89ef9300fb39d2d94ed4414ec534b72a
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false, "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}"}}
47,926
[ "transformers", "safetensors", "mistral", "text-generation", "finetuned", "conversational", "arxiv:2310.06825", "base_model:mistralai/Mistral-7B-Instruct-v0.2", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.2", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2023-12-11T22:18:46.000Z
TheBloke/Mistral-7B-Instruct-v0.2-GPTQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
mistralai/Mistral-7B-Instruct-v0.2
1,321
text-generation
1,321
269
0
2
1
414,764
65778ac658d7a2cc892266d6
TheBloke/Mistral-7B-Instruct-v0.2-AWQ
TheBloke
False
explicit-opt-out
2023-12-11T22:36:20.000Z
41
false
f970a2bb89d5c2f9d217dc337f39e24625d6462a
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "awq"}, "tokenizer_config": {"bos_token": "<s>", "eos_token": "</s>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": false, "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}"}}
46,303
[ "transformers", "safetensors", "mistral", "text-generation", "finetuned", "conversational", "arxiv:2310.06825", "base_model:mistralai/Mistral-7B-Instruct-v0.2", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.2", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "awq", "region:us" ]
text-generation
transformers
2023-12-11T22:18:46.000Z
TheBloke/Mistral-7B-Instruct-v0.2-AWQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quant_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
mistralai/Mistral-7B-Instruct-v0.2
1,321
text-generation
1,321
15
0
0
0
414,765
659a5e051b4a26041af869ac
openchat/openchat-3.5-0106
openchat
False
not-popular-enough
2024-05-18T18:14:51.000Z
342
false
ff058fda49726ecf4ea53dc1635f917cdb8ba36b
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}", "eos_token": "<|end_of_turn|>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": true}}
45,756
[ "transformers", "safetensors", "mistral", "text-generation", "openchat", "C-RLFT", "conversational", "arxiv:2309.11235", "arxiv:2303.08774", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-01-07T08:17:09.000Z
openchat/openchat-3.5-0106
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00003.safetensors" }, { "rfilename": "model-00002-of-00003.safetensors" }, { "rfilename": "model-00003-of-00003.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "openchat.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
16
28
30
22
443,802
66616ddf3fdeeb338135dbef
Qwen/Qwen2-0.5B-Instruct-GGUF
Qwen
False
library-not-detected
2024-08-21T10:29:53.000Z
54
false
198f08841147e5196a6a69bd0053690fb1fd3857
null
45,743
[ "gguf", "instruct", "chat", "text-generation", "en", "base_model:Qwen/Qwen2-0.5B-Instruct", "base_model:quantized:Qwen/Qwen2-0.5B-Instruct", "license:apache-2.0", "region:us" ]
text-generation
null
2024-06-06T08:05:51.000Z
Qwen/Qwen2-0.5B-Instruct-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "qwen2-0_5b-instruct-fp16.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q2_k.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q3_k_m.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q4_0.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q4_k_m.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q5_0.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q5_k_m.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q6_k.gguf" }, { "rfilename": "qwen2-0_5b-instruct-q8_0.gguf" } ]
null
Qwen/Qwen2-0.5B-Instruct
178
text-generation
178
0
0
0
0
685,351
653ec25781277ed9683d246f
teknium/OpenHermes-2.5-Mistral-7B
teknium
False
not-popular-enough
2024-02-19T17:53:06.000Z
803
false
24c0bea14d53e6f67f1fbe2eca5bfe7cae389b33
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "eos_token": "<|im_end|>", "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": true}}
45,623
[ "transformers", "pytorch", "safetensors", "mistral", "text-generation", "instruct", "finetune", "chatml", "gpt4", "synthetic data", "distillation", "conversational", "en", "dataset:teknium/OpenHermes-2.5", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2023-10-29T20:36:39.000Z
teknium/OpenHermes-2.5-Mistral-7B
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "pytorch_model-00001-of-00002.bin" }, { "rfilename": "pytorch_model-00002-of-00002.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "transformers_inference.py" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
96
37
71
30
360,799
65775021c991ca09567c1237
TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF
TheBloke
False
explicit-opt-out
2023-12-14T14:30:43.000Z
596
false
fa1d3835c5d45a3a74c0b68805fcdc133dba2b6a
{"model_type": "mixtral"}
45,482
[ "transformers", "gguf", "mixtral", "fr", "it", "de", "es", "en", "base_model:mistralai/Mixtral-8x7B-Instruct-v0.1", "base_model:quantized:mistralai/Mixtral-8x7B-Instruct-v0.1", "license:apache-2.0", "text-generation-inference", "region:us" ]
null
transformers
2023-12-11T18:08:33.000Z
TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q2_K.gguf" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q3_K_M.gguf" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q4_0.gguf" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q4_K_M.gguf" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q5_0.gguf" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q5_K_M.gguf" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q6_K.gguf" }, { "rfilename": "mixtral-8x7b-instruct-v0.1.Q8_0.gguf" } ]
mixtral
mistralai/Mixtral-8x7B-Instruct-v0.1
166
text-generation
166
0
0
0
0
414,555
66a5de99a28bc058db44d78f
NousResearch/Hermes-3-Llama-3.1-8B
NousResearch
False
warm
2024-09-08T07:39:55.000Z
195
false
896ea440e5a9e6070e3d8a2774daf2b481ab425b
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": [{"name": "default", "template": "{{bos_token}}{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"}, {"name": "tool_use", "template": "{%- macro json_to_python_type(json_spec) %}\n{%- set basic_type_map = {\n \"string\": \"str\",\n \"number\": \"float\",\n \"integer\": \"int\",\n \"boolean\": \"bool\"\n} %}\n\n{%- if basic_type_map[json_spec.type] is defined %}\n {{- basic_type_map[json_spec.type] }}\n{%- elif json_spec.type == \"array\" %}\n {{- \"list[\" + json_to_python_type(json_spec|items) + \"]\"}}\n{%- elif json_spec.type == \"object\" %}\n {%- if json_spec.additionalProperties is defined %}\n {{- \"dict[str, \" + json_to_python_type(json_spec.additionalProperties) + ']'}}\n {%- else %}\n {{- \"dict\" }}\n {%- endif %}\n{%- elif json_spec.type is iterable %}\n {{- \"Union[\" }}\n {%- for t in json_spec.type %}\n {{- json_to_python_type({\"type\": t}) }}\n {%- if not loop.last %}\n {{- \",\" }} \n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n{%- else %}\n {{- \"Any\" }}\n{%- endif %}\n{%- endmacro %}\n\n\n{{- bos_token }}\n{{- '<|im_start|>system\n' }}\n{{- \"You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools: <tools> \" }}\n{%- for tool in tools %}\n {%- if tool.function is defined %}\n {%- set tool = tool.function %}\n {%- endif %}\n {{- '{\"type\": \"function\", \"function\": ' }}\n {{- '{\"name\": \"' + tool.name + '\", ' }}\n {{- '\"description\": \"' + tool.name + '(' }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {{- param_name + \": \" + json_to_python_type(param_fields) }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- if tool.return is defined %}\n {{- \" -> \" + json_to_python_type(tool.return) }}\n {%- endif %}\n {{- \" - \" + tool.description + \"\n\n\" }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {%- if loop.first %}\n {{- \" Args:\n\" }}\n {%- endif %}\n {{- \" \" + param_name + \"(\" + json_to_python_type(param_fields) + \"): \" + param_fields.description|trim }}\n {%- endfor %}\n {%- if tool.return is defined and tool.return.description is defined %}\n {{- \"\n Returns:\n \" + tool.return.description }}\n {%- endif %}\n {{- '\"' }}\n {{- ', \"parameters\": ' }}\n {%- if tool.parameters.properties | length == 0 %}\n {{- \"{}\" }}\n {%- else %}\n {{- tool.parameters|tojson }}\n {%- endif %}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \"\n\" }}\n {%- endif %}\n{%- endfor %}\n{{- \" </tools>\" }}\n{{- 'Use the following pydantic model json schema for each tool call you will make: {\"properties\": {\"name\": {\"title\": \"Name\", \"type\": \"string\"}, \"arguments\": {\"title\": \"Arguments\", \"type\": \"object\"}}, \"required\": [\"name\", \"arguments\"], \"title\": \"FunctionCall\", \"type\": \"object\"}}\n' }}\n{{- \"For each function call return a json object with function name and arguments within <tool_call></tool_call> XML 
tags as follows:\n\" }}\n{{- \"<tool_call>\n\" }}\n{{- '{\"name\": <function-name>, \"arguments\": <args-dict>}\n' }}\n{{- '</tool_call><|im_end|>\n' }}\n{%- for message in messages %}\n {%- if message.role == \"user\" or message.role == \"system\" or (message.role == \"assistant\" and message.tool_calls is not defined) %}\n {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- for tool_call in message.tool_calls %}\n {{- '\n<tool_call>\n' }} {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '{' }}\n {{- '\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\"' }}\n {{- ', '}}\n {%- if tool_call.arguments is defined %}\n {{- '\"arguments\": ' }}\n {%- if tool_call.arguments is string %}\n {{- tool_call.arguments }}\n {%- else %}\n {{- tool_call.arguments|tojson }}\n {%- endif %}\n {%- endif %}\n {{- '}' }}\n {{- '\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if loop.previtem and loop.previtem.role != \"tool\" %}\n {{- '<|im_start|>tool\n' }}\n {%- endif %}\n {{- '<tool_response>\n' }}\n {{- message.content }}\n {%- if not loop.last %}\n {{- '\n</tool_response>\n' }}\n {%- else %}\n {{- '\n</tool_response>' }}\n {%- endif %}\n {%- if not loop.last and loop.nextitem.role != \"tool\" %}\n {{- '<|im_end|>' }}\n {%- elif loop.last %}\n {{- '<|im_end|>' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\n' }}\n{%- endif %}\n"}], "eos_token": "<|im_end|>", "pad_token": "<|im_end|>"}}
42,774
[ "transformers", "safetensors", "llama", "text-generation", "Llama-3", "instruct", "finetune", "chatml", "gpt4", "synthetic data", "distillation", "function calling", "json mode", "axolotl", "roleplaying", "chat", "conversational", "en", "arxiv:2408.11857", "base_model:meta-llama/Llama-3.1-8B", "base_model:finetune:meta-llama/Llama-3.1-8B", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-07-28T06:00:57.000Z
NousResearch/Hermes-3-Llama-3.1-8B
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-3.1-8B
254
text-generation
254
1
16
10
33
782,842
663238fcd5677e328794061c
DeepMount00/Llama-3-8b-Ita
DeepMount00
False
not-popular-enough
2024-08-13T11:16:20.000Z
23
false
9e5753beda3704492ae340e0321ad516a58b9ffd
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}", "eos_token": "<|eot_id|>"}}
40,242
[ "transformers", "safetensors", "llama", "text-generation", "conversational", "it", "en", "dataset:DeepMount00/llm_ita_ultra", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:finetune:meta-llama/Meta-Llama-3-8B", "license:llama3", "model-index", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-05-01T12:43:40.000Z
DeepMount00/Llama-3-8b-Ita
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Meta-Llama-3-8B
976
text-generation
976
0
43
0
2
614,878
66d8ab8cebc4415df3ff3836
modularai/llama-3.1
modularai
False
explicit-opt-out
2024-09-09T16:49:02.000Z
0
false
966694508430d1177f6d585de779250e7a34bc3a
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", "eos_token": "<|eot_id|>"}}
29,726
[ "gguf", "llama", "facebook", "meta", "pytorch", "llama-3", "text-generation", "conversational", "en", "arxiv:2204.05149", "base_model:meta-llama/Llama-3.1-8B-Instruct", "base_model:quantized:meta-llama/Llama-3.1-8B-Instruct", "license:llama3", "region:us" ]
text-generation
null
2024-09-04T18:48:44.000Z
modularai/llama-3.1
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "llama-3.1-8b-instruct-bf16.gguf" }, { "rfilename": "llama-3.1-8b-instruct-f32.gguf" }, { "rfilename": "llama-3.1-8b-instruct-q4_0.gguf" }, { "rfilename": "llama-3.1-8b-instruct-q4_k_m.gguf" }, { "rfilename": "llama-3.1-8b-instruct-q6_k.gguf" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-3.1-8B-Instruct
695
text-generation
695
0
0
0
0
898,568
6591d4d754f88261730df832
TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF
TheBloke
False
explicit-opt-out
2023-12-31T21:29:33.000Z
110
false
52e7645ba7c309695bec7ac98f4f005b139cf465
{"model_type": "tinyllama"}
28,570
[ "transformers", "gguf", "tinyllama", "en", "dataset:cerebras/SlimPajama-627B", "dataset:bigcode/starcoderdata", "dataset:OpenAssistant/oasst_top1_2023-08-25", "base_model:TinyLlama/TinyLlama-1.1B-Chat-v1.0", "base_model:quantized:TinyLlama/TinyLlama-1.1B-Chat-v1.0", "license:apache-2.0", "region:us" ]
null
transformers
2023-12-31T20:53:43.000Z
TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q2_K.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q3_K_L.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q3_K_M.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q3_K_S.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q4_0.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q4_K_S.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q5_0.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q5_K_M.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q5_K_S.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q6_K.gguf" }, { "rfilename": "tinyllama-1.1b-chat-v1.0.Q8_0.gguf" } ]
tinyllama
TinyLlama/TinyLlama-1.1B-Chat-v1.0
687
text-generation
687
0
0
2
0
436,921
657f5104476260623ddf5081
mlc-ai/Llama-2-7b-chat-hf-q4f32_1-MLC
mlc-ai
False
pipeline-not-detected
2024-07-11T15:31:37.000Z
2
false
bab021ce3e8b5715dc8679df1661c32a5d1e26da
{"tokenizer_config": {"bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "pad_token": null, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}}}
28,221
[ "mlc-llm", "web-llm", "base_model:meta-llama/Llama-2-7b-chat-hf", "base_model:quantized:meta-llama/Llama-2-7b-chat-hf", "region:us" ]
null
mlc-llm
2023-12-17T19:50:28.000Z
mlc-ai/Llama-2-7b-chat-hf-q4f32_1-MLC
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "logs.txt" }, { "rfilename": "mlc-chat-config.json" }, { "rfilename": "ndarray-cache.json" }, { "rfilename": "params_shard_0.bin" }, { "rfilename": "params_shard_1.bin" }, { "rfilename": "params_shard_10.bin" }, { "rfilename": "params_shard_100.bin" }, { "rfilename": "params_shard_101.bin" }, { "rfilename": "params_shard_102.bin" }, { "rfilename": "params_shard_103.bin" }, { "rfilename": "params_shard_104.bin" }, { "rfilename": "params_shard_105.bin" }, { "rfilename": "params_shard_106.bin" }, { "rfilename": "params_shard_107.bin" }, { "rfilename": "params_shard_108.bin" }, { "rfilename": "params_shard_109.bin" }, { "rfilename": "params_shard_11.bin" }, { "rfilename": "params_shard_110.bin" }, { "rfilename": "params_shard_111.bin" }, { "rfilename": "params_shard_112.bin" }, { "rfilename": "params_shard_113.bin" }, { "rfilename": "params_shard_114.bin" }, { "rfilename": "params_shard_115.bin" }, { "rfilename": "params_shard_116.bin" }, { "rfilename": "params_shard_117.bin" }, { "rfilename": "params_shard_118.bin" }, { "rfilename": "params_shard_119.bin" }, { "rfilename": "params_shard_12.bin" }, { "rfilename": "params_shard_120.bin" }, { "rfilename": "params_shard_121.bin" }, { "rfilename": "params_shard_122.bin" }, { "rfilename": "params_shard_123.bin" }, { "rfilename": "params_shard_124.bin" }, { "rfilename": "params_shard_125.bin" }, { "rfilename": "params_shard_126.bin" }, { "rfilename": "params_shard_127.bin" }, { "rfilename": "params_shard_128.bin" }, { "rfilename": "params_shard_129.bin" }, { "rfilename": "params_shard_13.bin" }, { "rfilename": "params_shard_130.bin" }, { "rfilename": "params_shard_131.bin" }, { "rfilename": "params_shard_14.bin" }, { "rfilename": "params_shard_15.bin" }, { "rfilename": "params_shard_16.bin" }, { "rfilename": "params_shard_17.bin" }, { "rfilename": "params_shard_18.bin" }, { "rfilename": "params_shard_19.bin" }, { "rfilename": "params_shard_2.bin" }, { "rfilename": "params_shard_20.bin" }, { "rfilename": "params_shard_21.bin" }, { "rfilename": "params_shard_22.bin" }, { "rfilename": "params_shard_23.bin" }, { "rfilename": "params_shard_24.bin" }, { "rfilename": "params_shard_25.bin" }, { "rfilename": "params_shard_26.bin" }, { "rfilename": "params_shard_27.bin" }, { "rfilename": "params_shard_28.bin" }, { "rfilename": "params_shard_29.bin" }, { "rfilename": "params_shard_3.bin" }, { "rfilename": "params_shard_30.bin" }, { "rfilename": "params_shard_31.bin" }, { "rfilename": "params_shard_32.bin" }, { "rfilename": "params_shard_33.bin" }, { "rfilename": "params_shard_34.bin" }, { "rfilename": "params_shard_35.bin" }, { "rfilename": "params_shard_36.bin" }, { "rfilename": "params_shard_37.bin" }, { "rfilename": "params_shard_38.bin" }, { "rfilename": "params_shard_39.bin" }, { "rfilename": "params_shard_4.bin" }, { "rfilename": "params_shard_40.bin" }, { "rfilename": "params_shard_41.bin" }, { "rfilename": "params_shard_42.bin" }, { "rfilename": "params_shard_43.bin" }, { "rfilename": "params_shard_44.bin" }, { "rfilename": "params_shard_45.bin" }, { "rfilename": "params_shard_46.bin" }, { "rfilename": "params_shard_47.bin" }, { "rfilename": "params_shard_48.bin" }, { "rfilename": "params_shard_49.bin" }, { "rfilename": "params_shard_5.bin" }, { "rfilename": "params_shard_50.bin" }, { "rfilename": "params_shard_51.bin" }, { "rfilename": "params_shard_52.bin" }, { "rfilename": "params_shard_53.bin" }, { "rfilename": "params_shard_54.bin" }, { "rfilename": 
"params_shard_55.bin" }, { "rfilename": "params_shard_56.bin" }, { "rfilename": "params_shard_57.bin" }, { "rfilename": "params_shard_58.bin" }, { "rfilename": "params_shard_59.bin" }, { "rfilename": "params_shard_6.bin" }, { "rfilename": "params_shard_60.bin" }, { "rfilename": "params_shard_61.bin" }, { "rfilename": "params_shard_62.bin" }, { "rfilename": "params_shard_63.bin" }, { "rfilename": "params_shard_64.bin" }, { "rfilename": "params_shard_65.bin" }, { "rfilename": "params_shard_66.bin" }, { "rfilename": "params_shard_67.bin" }, { "rfilename": "params_shard_68.bin" }, { "rfilename": "params_shard_69.bin" }, { "rfilename": "params_shard_7.bin" }, { "rfilename": "params_shard_70.bin" }, { "rfilename": "params_shard_71.bin" }, { "rfilename": "params_shard_72.bin" }, { "rfilename": "params_shard_73.bin" }, { "rfilename": "params_shard_74.bin" }, { "rfilename": "params_shard_75.bin" }, { "rfilename": "params_shard_76.bin" }, { "rfilename": "params_shard_77.bin" }, { "rfilename": "params_shard_78.bin" }, { "rfilename": "params_shard_79.bin" }, { "rfilename": "params_shard_8.bin" }, { "rfilename": "params_shard_80.bin" }, { "rfilename": "params_shard_81.bin" }, { "rfilename": "params_shard_82.bin" }, { "rfilename": "params_shard_83.bin" }, { "rfilename": "params_shard_84.bin" }, { "rfilename": "params_shard_85.bin" }, { "rfilename": "params_shard_86.bin" }, { "rfilename": "params_shard_87.bin" }, { "rfilename": "params_shard_88.bin" }, { "rfilename": "params_shard_89.bin" }, { "rfilename": "params_shard_9.bin" }, { "rfilename": "params_shard_90.bin" }, { "rfilename": "params_shard_91.bin" }, { "rfilename": "params_shard_92.bin" }, { "rfilename": "params_shard_93.bin" }, { "rfilename": "params_shard_94.bin" }, { "rfilename": "params_shard_95.bin" }, { "rfilename": "params_shard_96.bin" }, { "rfilename": "params_shard_97.bin" }, { "rfilename": "params_shard_98.bin" }, { "rfilename": "params_shard_99.bin" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
null
meta-llama/Llama-2-7b-chat-hf
1,328
text-generation
1,328
0
0
0
0
422,014
66a2ab4c3c7f0decff7157d6
lmstudio-community/Phi-3.1-mini-128k-instruct-GGUF
lmstudio-community
False
library-not-detected
2024-07-25T19:49:22.000Z
2
false
6fb38f94bbd8e88338e0b965d056e0d780b5f8b7
null
25,199
[ "gguf", "nlp", "code", "text-generation", "en", "arxiv:2404.14219", "base_model:microsoft/Phi-3-mini-128k-instruct", "base_model:quantized:microsoft/Phi-3-mini-128k-instruct", "license:mit", "region:us" ]
text-generation
null
2024-07-25T19:45:16.000Z
lmstudio-community/Phi-3.1-mini-128k-instruct-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Phi-3.1-mini-128k-instruct-IQ3_M.gguf" }, { "rfilename": "Phi-3.1-mini-128k-instruct-IQ4_XS.gguf" }, { "rfilename": "Phi-3.1-mini-128k-instruct-Q3_K_L.gguf" }, { "rfilename": "Phi-3.1-mini-128k-instruct-Q4_K_M.gguf" }, { "rfilename": "Phi-3.1-mini-128k-instruct-Q5_K_M.gguf" }, { "rfilename": "Phi-3.1-mini-128k-instruct-Q6_K.gguf" }, { "rfilename": "Phi-3.1-mini-128k-instruct-Q8_0.gguf" }, { "rfilename": "README.md" } ]
null
microsoft/Phi-3-mini-128k-instruct
196
text-generation
196
0
0
0
0
778,040
654cce913a321f06c2b5ba05
alignment-handbook/zephyr-7b-sft-full
alignment-handbook
False
not-popular-enough
2024-01-10T03:18:43.000Z
21
false
92f9fac4529acacb2c33a35c46917393690c6311
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}", "eos_token": "</s>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": false}}
24,850
[ "transformers", "tensorboard", "safetensors", "mistral", "text-generation", "alignment-handbook", "generated_from_trainer", "trl", "sft", "conversational", "dataset:HuggingFaceH4/ultrachat_200k", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2023-11-09T12:20:33.000Z
alignment-handbook/zephyr-7b-sft-full
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "all_results.json" }, { "rfilename": "config.json" }, { "rfilename": "eval_results.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00003.safetensors" }, { "rfilename": "model-00002-of-00003.safetensors" }, { "rfilename": "model-00003-of-00003.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "runs/Jan08_12-40-07_ip-26-0-175-170/events.out.tfevents.1704718278.ip-26-0-175-170.1661731.0" }, { "rfilename": "runs/Jan08_12-40-07_ip-26-0-175-170/events.out.tfevents.1704723892.ip-26-0-175-170.1661731.1" }, { "rfilename": "runs/Jan08_21-58-35_ip-26-0-164-18/events.out.tfevents.1704751144.ip-26-0-164-18.2758079.0" }, { "rfilename": "runs/Jan08_21-58-35_ip-26-0-164-18/events.out.tfevents.1704756894.ip-26-0-164-18.2758079.1" }, { "rfilename": "runs/Jan09_00-05-47_ip-26-0-164-18/events.out.tfevents.1704758820.ip-26-0-164-18.2771624.0" }, { "rfilename": "runs/Jan09_00-05-47_ip-26-0-164-18/events.out.tfevents.1704759527.ip-26-0-164-18.2771624.1" }, { "rfilename": "runs/Jan09_22-11-21_ip-26-0-162-180/events.out.tfevents.1704838311.ip-26-0-162-180.3647178.0" }, { "rfilename": "runs/Jan09_22-11-21_ip-26-0-162-180/events.out.tfevents.1704844068.ip-26-0-162-180.3647178.1" }, { "rfilename": "runs/Jan10_01-40-06_ip-26-0-168-34/events.out.tfevents.1704850836.ip-26-0-168-34.1133327.0" }, { "rfilename": "runs/Jan10_01-40-06_ip-26-0-168-34/events.out.tfevents.1704856586.ip-26-0-168-34.1133327.1" }, { "rfilename": "runs/Nov09_12-20-08_ip-26-0-145-152/events.out.tfevents.1699532441.ip-26-0-145-152.2480166.0" }, { "rfilename": "runs/Nov09_20-23-56_ip-26-0-155-187/events.out.tfevents.1699561516.ip-26-0-155-187.155279.0" }, { "rfilename": "runs/Nov09_20-23-56_ip-26-0-155-187/events.out.tfevents.1699573490.ip-26-0-155-187.155279.1" }, { "rfilename": "runs/Nov10_00-16-16_ip-26-0-155-187/events.out.tfevents.1699575456.ip-26-0-155-187.230765.0" }, { "rfilename": "runs/Nov10_00-16-16_ip-26-0-155-187/events.out.tfevents.1699587432.ip-26-0-155-187.230765.1" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "train_results.json" }, { "rfilename": "trainer_state.json" }, { "rfilename": "training_args.bin" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
136
0
263
1
372,399
65146b42c75a3d4c44e41667
TheBloke/Mistral-7B-Instruct-v0.1-GGUF
TheBloke
False
explicit-opt-out
2023-12-09T16:09:28.000Z
508
false
731a9fc8f06f5f5e2db8a0cf9d256197eb6e05d1
{"model_type": "mistral"}
24,376
[ "transformers", "gguf", "mistral", "finetuned", "text-generation", "base_model:mistralai/Mistral-7B-Instruct-v0.1", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.1", "license:apache-2.0", "text-generation-inference", "region:us" ]
text-generation
transformers
2023-09-27T17:49:54.000Z
TheBloke/Mistral-7B-Instruct-v0.1-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "mistral-7b-instruct-v0.1.Q2_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q3_K_L.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q3_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q3_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q4_0.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q4_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q4_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q5_0.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q5_K_M.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q5_K_S.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q6_K.gguf" }, { "rfilename": "mistral-7b-instruct-v0.1.Q8_0.gguf" } ]
mistral
mistralai/Mistral-7B-Instruct-v0.1
500
text-generation
500
0
0
0
0
326,999
6659b8b93114a8eca3bcb84a
numind/NuExtract-tiny
numind
False
not-popular-enough
2024-08-23T14:49:20.000Z
35
false
c4fca7faff3f8ef4f020fd22cf4480b7f2d82c14
{"architectures": ["Qwen2ForCausalLM"], "model_type": "qwen2", "tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|end-output|>", "pad_token": "<|endoftext|>", "unk_token": null}}
23,877
[ "transformers", "safetensors", "qwen2", "text-generation", "conversational", "en", "base_model:Qwen/Qwen1.5-0.5B", "base_model:finetune:Qwen/Qwen1.5-0.5B", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-05-31T11:47:05.000Z
numind/NuExtract-tiny
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "merges.txt" }, { "rfilename": "model.safetensors" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "vocab.json" } ]
qwen2
Qwen/Qwen1.5-0.5B
28,947
text-generation
28,947
0
0
7
3
672,306
667d887808289e6bddd446c7
bartowski/gemma-2-9b-it-GGUF
bartowski
False
not-popular-enough
2024-07-15T18:57:00.000Z
177
false
d731033f3dc4018261fd39896e50984d398b4ac5
null
22,919
[ "transformers", "gguf", "conversational", "text-generation", "base_model:google/gemma-2-9b-it", "base_model:quantized:google/gemma-2-9b-it", "license:gemma", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-06-27T15:42:48.000Z
bartowski/gemma-2-9b-it-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "gemma-2-9b-it-IQ2_M.gguf" }, { "rfilename": "gemma-2-9b-it-IQ2_S.gguf" }, { "rfilename": "gemma-2-9b-it-IQ2_XS.gguf" }, { "rfilename": "gemma-2-9b-it-IQ3_M.gguf" }, { "rfilename": "gemma-2-9b-it-IQ3_XS.gguf" }, { "rfilename": "gemma-2-9b-it-IQ3_XXS.gguf" }, { "rfilename": "gemma-2-9b-it-IQ4_XS.gguf" }, { "rfilename": "gemma-2-9b-it-Q2_K.gguf" }, { "rfilename": "gemma-2-9b-it-Q2_K_L.gguf" }, { "rfilename": "gemma-2-9b-it-Q3_K_L-Q8.gguf" }, { "rfilename": "gemma-2-9b-it-Q3_K_L.gguf" }, { "rfilename": "gemma-2-9b-it-Q3_K_M.gguf" }, { "rfilename": "gemma-2-9b-it-Q3_K_S.gguf" }, { "rfilename": "gemma-2-9b-it-Q3_K_XL.gguf" }, { "rfilename": "gemma-2-9b-it-Q4_K_L.gguf" }, { "rfilename": "gemma-2-9b-it-Q4_K_M-fp16.gguf" }, { "rfilename": "gemma-2-9b-it-Q4_K_M.gguf" }, { "rfilename": "gemma-2-9b-it-Q4_K_S.gguf" }, { "rfilename": "gemma-2-9b-it-Q5_K_L.gguf" }, { "rfilename": "gemma-2-9b-it-Q5_K_M.gguf" }, { "rfilename": "gemma-2-9b-it-Q5_K_S.gguf" }, { "rfilename": "gemma-2-9b-it-Q6_K-Q8.gguf" }, { "rfilename": "gemma-2-9b-it-Q6_K-f32.gguf" }, { "rfilename": "gemma-2-9b-it-Q6_K.gguf" }, { "rfilename": "gemma-2-9b-it-Q6_K_L.gguf" }, { "rfilename": "gemma-2-9b-it-Q8_0-f16.gguf" }, { "rfilename": "gemma-2-9b-it-Q8_0.gguf" }, { "rfilename": "gemma-2-9b-it-Q8_0_L.gguf" }, { "rfilename": "gemma-2-9b-it-f32.gguf" }, { "rfilename": "gemma-2-9b-it.imatrix" } ]
null
google/gemma-2-9b-it
131
text-generation
131
0
0
0
0
725,091
664e2b118e43ee411f6ca3c4
MaziyarPanahi/Mistral-7B-Instruct-v0.3-GGUF
MaziyarPanahi
False
explicit-opt-out
2024-05-22T20:26:06.000Z
58
false
ce89f595755a4bf2e2e05d155cc43cb847c78978
{"model_type": "mistral"}
22,372
[ "transformers", "gguf", "mistral", "quantized", "2-bit", "3-bit", "4-bit", "5-bit", "6-bit", "8-bit", "GGUF", "safetensors", "text-generation", "conversational", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "text-generation-inference", "region:us", "base_model:mistralai/Mistral-7B-Instruct-v0.3", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.3" ]
text-generation
transformers
2024-05-22T17:27:45.000Z
MaziyarPanahi/Mistral-7B-Instruct-v0.3-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Mistral-7B-Instruct-v0.3.IQ1_M.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.IQ1_S.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.IQ2_XS.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.IQ3_XS.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.IQ4_XS.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q2_K.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q3_K_L.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q3_K_M.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q3_K_S.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q4_K_M.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q4_K_S.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q5_K_M.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q5_K_S.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q6_K.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.Q8_0.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3.fp16.gguf" }, { "rfilename": "README.md" }, { "rfilename": "config.json" } ]
mistral
mistralai/Mistral-7B-Instruct-v0.3
320
text-generation
320
0
0
0
0
653,473
66aa6a19a9ec1810a37eb785
bartowski/gemma-2-2b-it-GGUF
bartowski
False
not-popular-enough
2024-08-05T19:14:36.000Z
26
false
855f67caed130e1befc571b52bd181be2e858883
null
22,089
[ "transformers", "gguf", "conversational", "text-generation", "base_model:google/gemma-2-2b-it", "base_model:quantized:google/gemma-2-2b-it", "license:gemma", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-07-31T16:45:13.000Z
bartowski/gemma-2-2b-it-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "gemma-2-2b-it-IQ3_M.gguf" }, { "rfilename": "gemma-2-2b-it-IQ4_XS.gguf" }, { "rfilename": "gemma-2-2b-it-Q3_K_L.gguf" }, { "rfilename": "gemma-2-2b-it-Q4_K_M.gguf" }, { "rfilename": "gemma-2-2b-it-Q4_K_S.gguf" }, { "rfilename": "gemma-2-2b-it-Q5_K_M.gguf" }, { "rfilename": "gemma-2-2b-it-Q5_K_S.gguf" }, { "rfilename": "gemma-2-2b-it-Q6_K.gguf" }, { "rfilename": "gemma-2-2b-it-Q6_K_L.gguf" }, { "rfilename": "gemma-2-2b-it-Q8_0.gguf" }, { "rfilename": "gemma-2-2b-it-f32.gguf" }, { "rfilename": "gemma-2-2b-it.imatrix" } ]
null
google/gemma-2-2b-it
265
text-generation
265
0
0
0
0
791,308
6523bd962d3ba46ccd378b5a
HuggingFaceH4/zephyr-7b-alpha
HuggingFaceH4
False
loading
2023-11-21T17:28:11.000Z
1,094
false
2ce2d025864af849b3e5029e2ec9d568eeda892d
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}", "eos_token": "</s>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": true}}
22,023
[ "transformers", "pytorch", "safetensors", "mistral", "text-generation", "generated_from_trainer", "conversational", "en", "dataset:stingning/ultrachat", "dataset:openbmb/UltraFeedback", "arxiv:2305.18290", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2023-10-09T08:45:10.000Z
HuggingFaceH4/zephyr-7b-alpha
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "all_results.json" }, { "rfilename": "colab-demo.ipynb" }, { "rfilename": "config.json" }, { "rfilename": "eval_results.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00008.safetensors" }, { "rfilename": "model-00002-of-00008.safetensors" }, { "rfilename": "model-00003-of-00008.safetensors" }, { "rfilename": "model-00004-of-00008.safetensors" }, { "rfilename": "model-00005-of-00008.safetensors" }, { "rfilename": "model-00006-of-00008.safetensors" }, { "rfilename": "model-00007-of-00008.safetensors" }, { "rfilename": "model-00008-of-00008.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "pytorch_model-00001-of-00008.bin" }, { "rfilename": "pytorch_model-00002-of-00008.bin" }, { "rfilename": "pytorch_model-00003-of-00008.bin" }, { "rfilename": "pytorch_model-00004-of-00008.bin" }, { "rfilename": "pytorch_model-00005-of-00008.bin" }, { "rfilename": "pytorch_model-00006-of-00008.bin" }, { "rfilename": "pytorch_model-00007-of-00008.bin" }, { "rfilename": "pytorch_model-00008-of-00008.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "thumbnail.png" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "train_results.json" }, { "rfilename": "trainer_state.json" }, { "rfilename": "training_args.bin" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
42
1
12
8
337,729
66de024731e772c5ec381060
silent666/01-ai-Yi-1.5-9B-1725825607
silent666
False
pipeline-not-detected
2024-09-08T20:00:49.000Z
0
false
86e187005fff940a3f68ccd12dd0083deb7a96c0
{"tokenizer_config": {"bos_token": "<|startoftext|>", "eos_token": "<|endoftext|>", "pad_token": "<unk>", "unk_token": "<unk>", "use_default_system_prompt": false}, "peft": {"base_model_name_or_path": "01-ai/Yi-1.5-9B", "task_type": "CAUSAL_LM"}}
20,833
[ "peft", "safetensors", "arxiv:1910.09700", "base_model:01-ai/Yi-1.5-9B", "base_model:adapter:01-ai/Yi-1.5-9B", "region:us" ]
null
peft
2024-09-08T20:00:07.000Z
silent666/01-ai-Yi-1.5-9B-1725825607
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "adapter_config.json" }, { "rfilename": "adapter_model.safetensors" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "training_args.bin" } ]
null
01-ai/Yi-1.5-9B
225
text-generation
225
0
0
0
0
916,071
66c4f4e7f77e8e56f3806368
bartowski/Phi-3.5-mini-instruct-GGUF
bartowski
False
not-popular-enough
2024-09-15T07:35:15.000Z
36
false
6d70da17e749a471ccb62ade694486011a75cda3
null
19,761
[ "transformers", "gguf", "nlp", "code", "text-generation", "multilingual", "base_model:microsoft/Phi-3.5-mini-instruct", "base_model:quantized:microsoft/Phi-3.5-mini-instruct", "license:mit", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-08-20T19:56:23.000Z
bartowski/Phi-3.5-mini-instruct-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Phi-3.5-mini-instruct-IQ2_M.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-IQ3_M.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-IQ3_XS.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-IQ4_XS.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q2_K.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q2_K_L.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q3_K_L.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q3_K_M.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q3_K_S.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q3_K_XL.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q4_0.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q4_0_4_4.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q4_0_4_8.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q4_0_8_8.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q4_K_L.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q4_K_M.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q4_K_S.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q5_K_L.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q5_K_M.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q5_K_S.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q6_K.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q6_K_L.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-Q8_0.gguf" }, { "rfilename": "Phi-3.5-mini-instruct-f32.gguf" }, { "rfilename": "Phi-3.5-mini-instruct.imatrix" }, { "rfilename": "README.md" } ]
null
microsoft/Phi-3.5-mini-instruct
147
text-generation
147
0
0
0
0
842,257
6696a3096fa71ac401b70de0
princeton-nlp/gemma-2-9b-it-SimPO
princeton-nlp
False
not-popular-enough
2024-08-02T22:04:11.000Z
80
false
8c87091f412e3aa6f74f66bd86c57fb81cbc3fde
{"architectures": ["Gemma2ForCausalLM"], "model_type": "gemma2", "tokenizer_config": {"bos_token": "<bos>", "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] | trim + '\n\n' %}{% set messages = messages[1:] %}{% else %}{% set system_message = '' %}{% endif %}{% for message in messages %}{% if loop.index0 == 0 %}{% set content = system_message + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + content | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}", "eos_token": "<eos>", "pad_token": "<pad>", "unk_token": "<unk>", "use_default_system_prompt": false}}
19,520
[ "transformers", "safetensors", "gemma2", "text-generation", "alignment-handbook", "generated_from_trainer", "conversational", "dataset:princeton-nlp/gemma2-ultrafeedback-armorm", "arxiv:2405.14734", "base_model:google/gemma-2-9b-it", "base_model:finetune:google/gemma-2-9b-it", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-07-16T16:42:49.000Z
princeton-nlp/gemma-2-9b-it-SimPO
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "all_results.json" }, { "rfilename": "config.json" }, { "rfilename": "eval_results.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "train_results.json" }, { "rfilename": "trainer_state.json" }, { "rfilename": "training_args.bin" } ]
gemma2
google/gemma-2-9b-it
131
text-generation
131
0
17
8
15
756,458
6661b70d346ffbe5a3d44175
Qwen/Qwen2-7B-Instruct-GGUF
Qwen
False
library-not-detected
2024-08-21T10:28:11.000Z
163
false
c3024c6fff0a02d52119ecee024bbb93d4b4b8e4
null
19,088
[ "gguf", "chat", "text-generation", "en", "base_model:Qwen/Qwen2-7B-Instruct", "base_model:quantized:Qwen/Qwen2-7B-Instruct", "license:apache-2.0", "region:us" ]
text-generation
null
2024-06-06T13:18:05.000Z
Qwen/Qwen2-7B-Instruct-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "qwen2-7b-instruct-fp16.gguf" }, { "rfilename": "qwen2-7b-instruct-q2_k.gguf" }, { "rfilename": "qwen2-7b-instruct-q3_k_m.gguf" }, { "rfilename": "qwen2-7b-instruct-q4_0.gguf" }, { "rfilename": "qwen2-7b-instruct-q4_k_m.gguf" }, { "rfilename": "qwen2-7b-instruct-q5_0.gguf" }, { "rfilename": "qwen2-7b-instruct-q5_k_m.gguf" }, { "rfilename": "qwen2-7b-instruct-q6_k.gguf" }, { "rfilename": "qwen2-7b-instruct-q8_0.gguf" } ]
null
Qwen/Qwen2-7B-Instruct
161
text-generation
161
0
0
0
0
685,781
64f5fd954d3b1dd311d30e28
TheBloke/Llama-2-7B-GGUF
TheBloke
False
explicit-opt-out
2023-10-24T07:32:45.000Z
173
false
b4e04e128f421c93a5f1e34ac4d7ca9b0af47b80
{"model_type": "llama"}
18,814
[ "transformers", "gguf", "llama", "facebook", "meta", "pytorch", "llama-2", "text-generation", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-7b-hf", "base_model:quantized:meta-llama/Llama-2-7b-hf", "license:llama2", "text-generation-inference", "region:us" ]
text-generation
transformers
2023-09-04T15:53:57.000Z
TheBloke/Llama-2-7B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE.txt" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "llama-2-7b.Q2_K.gguf" }, { "rfilename": "llama-2-7b.Q3_K_L.gguf" }, { "rfilename": "llama-2-7b.Q3_K_M.gguf" }, { "rfilename": "llama-2-7b.Q3_K_S.gguf" }, { "rfilename": "llama-2-7b.Q4_0.gguf" }, { "rfilename": "llama-2-7b.Q4_K_M.gguf" }, { "rfilename": "llama-2-7b.Q4_K_S.gguf" }, { "rfilename": "llama-2-7b.Q5_0.gguf" }, { "rfilename": "llama-2-7b.Q5_K_M.gguf" }, { "rfilename": "llama-2-7b.Q5_K_S.gguf" }, { "rfilename": "llama-2-7b.Q6_K.gguf" }, { "rfilename": "llama-2-7b.Q8_0.gguf" } ]
llama
meta-llama/Llama-2-7b-hf
1,595
text-generation
1,595
0
0
0
0
303,156
64b6c679d16e945399007406
TheBloke/Llama-2-7B-GPTQ
TheBloke
False
explicit-opt-out
2023-09-27T12:44:46.000Z
80
false
60b5c75d47a7d925782e74d16b6686cf0abbd052
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}, "pad_token": null, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}}}
18,449
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-2", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-7b-hf", "base_model:quantized:meta-llama/Llama-2-7b-hf", "license:llama2", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2023-07-18T17:06:01.000Z
TheBloke/Llama-2-7B-GPTQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-2-7b-hf
1,595
text-generation
1,595
5
0
1
1
247,793
6631ab448d7c840b14eb5840
NousResearch/Hermes-2-Pro-Llama-3-8B-GGUF
NousResearch
False
library-not-detected
2024-05-03T14:21:35.000Z
154
false
fc0d3245797976d0effe82a4f6803a5814e6a4fe
null
18,414
[ "gguf", "Llama-3", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "synthetic data", "distillation", "function calling", "json mode", "en", "dataset:teknium/OpenHermes-2.5", "base_model:NousResearch/Meta-Llama-3-8B", "base_model:quantized:NousResearch/Meta-Llama-3-8B", "license:apache-2.0", "region:us" ]
null
null
2024-05-01T02:39:00.000Z
NousResearch/Hermes-2-Pro-Llama-3-8B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Hermes-2-Pro-Llama-3-8B-F16.gguf" }, { "rfilename": "Hermes-2-Pro-Llama-3-8B-Q4_K_M.gguf" }, { "rfilename": "Hermes-2-Pro-Llama-3-8B-Q5_K_M.gguf" }, { "rfilename": "Hermes-2-Pro-Llama-3-8B-Q6_K.gguf" }, { "rfilename": "Hermes-2-Pro-Llama-3-8B-Q8_0.gguf" }, { "rfilename": "README.md" } ]
null
NousResearch/Meta-Llama-3-8B
101
text-generation
101
0
0
0
0
614,232
65780e5e353869cd6ef1d1ca
openchat/openchat-3.5-1210
openchat
False
not-popular-enough
2024-05-18T18:10:44.000Z
276
false
801f5459b7577241500785f11c2b026912badd6e
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}", "eos_token": "<|end_of_turn|>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": true}}
18,317
[ "transformers", "safetensors", "mistral", "text-generation", "openchat", "C-RLFT", "conversational", "dataset:openchat/openchat_sharegpt4_dataset", "dataset:kaist-ai/Feedback-Collection", "dataset:imone/OpenOrca_FLAN", "dataset:LDJnr/Capybara", "dataset:tiedong/goat", "dataset:glaiveai/glaive-code-assistant", "dataset:meta-math/MetaMathQA", "dataset:OpenAssistant/oasst_top1_2023-08-25", "dataset:TIGER-Lab/MathInstruct", "arxiv:2309.11235", "arxiv:2303.08774", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2023-12-12T07:40:14.000Z
openchat/openchat-3.5-1210
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00003.safetensors" }, { "rfilename": "model-00002-of-00003.safetensors" }, { "rfilename": "model-00003-of-00003.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "openchat.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
8
14
11
5
415,184
66286775d59deb30bb03684f
maum-ai/Llama-3-MAAL-8B-Instruct-v0.1
maum-ai
False
not-popular-enough
2024-04-30T12:25:15.000Z
31
false
f6e6495a184ac245c8e3c953ce6acb8f732c52dd
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}", "eos_token": "<|end_of_text|>", "pad_token": "<<|PAD|>>"}}
18,172
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "llama-3", "llama-3-ko", "conversational", "en", "ko", "base_model:meta-llama/Meta-Llama-3-8B-Instruct", "base_model:finetune:meta-llama/Meta-Llama-3-8B-Instruct", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-04-24T01:59:17.000Z
maum-ai/Llama-3-MAAL-8B-Instruct-v0.1
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Meta-Llama-3-8B-Instruct
1,076
text-generation
1,076
0
12
0
0
601,965
66aa7b285bb7aca6cc409418
lmstudio-community/gemma-2-2b-it-GGUF
lmstudio-community
False
not-popular-enough
2024-07-31T18:58:29.000Z
16
false
6aa72da804ad76c5dc862867bfba6256de9172c7
null
16,615
[ "transformers", "gguf", "conversational", "text-generation", "base_model:google/gemma-2-2b-it", "base_model:quantized:google/gemma-2-2b-it", "license:gemma", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-07-31T17:58:00.000Z
lmstudio-community/gemma-2-2b-it-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "gemma-2-2b-it-IQ3_M.gguf" }, { "rfilename": "gemma-2-2b-it-IQ4_XS.gguf" }, { "rfilename": "gemma-2-2b-it-Q3_K_L.gguf" }, { "rfilename": "gemma-2-2b-it-Q4_K_M.gguf" }, { "rfilename": "gemma-2-2b-it-Q5_K_M.gguf" }, { "rfilename": "gemma-2-2b-it-Q6_K.gguf" }, { "rfilename": "gemma-2-2b-it-Q8_0.gguf" }, { "rfilename": "gemma-2-2b-it-f32.gguf" } ]
null
google/gemma-2-2b-it
265
text-generation
265
0
0
0
0
791,420
65eeff3ffc59f6e77eafb754
NousResearch/Hermes-2-Pro-Mistral-7B
NousResearch
False
not-popular-enough
2024-09-08T08:08:34.000Z
480
false
24dbda51d986efa05201eec7a093ac91fc5519cf
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": [{"name": "default", "template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"}, {"name": "tool_use", "template": "{%- macro json_to_python_type(json_spec) %}\n{%- set basic_type_map = {\n \"string\": \"str\",\n \"number\": \"float\",\n \"integer\": \"int\",\n \"boolean\": \"bool\"\n} %}\n\n{%- if basic_type_map[json_spec.type] is defined %}\n {{- basic_type_map[json_spec.type] }}\n{%- elif json_spec.type == \"array\" %}\n {{- \"list[\" + json_to_python_type(json_spec|items) + \"]\"}}\n{%- elif json_spec.type == \"object\" %}\n {%- if json_spec.additionalProperties is defined %}\n {{- \"dict[str, \" + json_to_python_type(json_spec.additionalProperties) + ']'}}\n {%- else %}\n {{- \"dict\" }}\n {%- endif %}\n{%- elif json_spec.type is iterable %}\n {{- \"Union[\" }}\n {%- for t in json_spec.type %}\n {{- json_to_python_type({\"type\": t}) }}\n {%- if not loop.last %}\n {{- \",\" }} \n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n{%- else %}\n {{- \"Any\" }}\n{%- endif %}\n{%- endmacro %}\n\n\n{{- bos_token }}\n{{- '<|im_start|>system\n' }}\n{{- \"You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools: <tools> \" }}\n{%- for tool in tools %}\n {%- if tool.function is defined %}\n {%- set tool = tool.function %}\n {%- endif %}\n {{- '{\"type\": \"function\", \"function\": ' }}\n {{- '{\"name\": \"' + tool.name + '\", ' }}\n {{- '\"description\": \"' + tool.name + '(' }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {{- param_name + \": \" + json_to_python_type(param_fields) }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- if tool.return is defined %}\n {{- \" -> \" + json_to_python_type(tool.return) }}\n {%- endif %}\n {{- \" - \" + tool.description + \"\n\n\" }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {%- if loop.first %}\n {{- \" Args:\n\" }}\n {%- endif %}\n {{- \" \" + param_name + \"(\" + json_to_python_type(param_fields) + \"): \" + param_fields.description|trim }}\n {%- endfor %}\n {%- if tool.return is defined and tool.return.description is defined %}\n {{- \"\n Returns:\n \" + tool.return.description }}\n {%- endif %}\n {{- '\"' }}\n {{- ', \"parameters\": ' }}\n {%- if tool.parameters.properties | length == 0 %}\n {{- \"{}\" }}\n {%- else %}\n {{- tool.parameters|tojson }}\n {%- endif %}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \"\n\" }}\n {%- endif %}\n{%- endfor %}\n{{- \" </tools>\" }}\n{{- 'Use the following pydantic model json schema for each tool call you will make: {\"properties\": {\"name\": {\"title\": \"Name\", \"type\": \"string\"}, \"arguments\": {\"title\": \"Arguments\", \"type\": \"object\"}}, \"required\": [\"name\", \"arguments\"], \"title\": \"FunctionCall\", \"type\": \"object\"}}\n' }}\n{{- \"For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:\n\" }}\n{{- \"<tool_call>\n\" }}\n{{- '{\"name\": <function-name>, \"arguments\": <args-dict>}\n' }}\n{{- 
'</tool_call><|im_end|>\n' }}\n{%- for message in messages %}\n {%- if message.role == \"user\" or message.role == \"system\" or (message.role == \"assistant\" and message.tool_calls is not defined) %}\n {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- for tool_call in message.tool_calls %}\n {{- '\n<tool_call>\n' }} {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '{' }}\n {{- '\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\"' }}\n {{- ', '}}\n {%- if tool_call.arguments is defined %}\n {{- '\"arguments\": ' }}\n {%- if tool_call.arguments is string %}\n {{- tool_call.arguments }}\n {%- else %}\n {{- tool_call.arguments|tojson }}\n {%- endif %}\n {%- endif %}\n {{- '}' }}\n {{- '\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if loop.previtem and loop.previtem.role != \"tool\" %}\n {{- '<|im_start|>tool\n' }}\n {%- endif %}\n {{- '<tool_response>\n' }}\n {{- message.content }}\n {%- if not loop.last %}\n {{- '\n</tool_response>\n' }}\n {%- else %}\n {{- '\n</tool_response>' }}\n {%- endif %}\n {%- if not loop.last and loop.nextitem.role != \"tool\" %}\n {{- '<|im_end|>' }}\n {%- elif loop.last %}\n {{- '<|im_end|>' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\n' }}\n{%- endif %}\n"}], "eos_token": "<|im_end|>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": false}}
16,543
[ "transformers", "safetensors", "mistral", "text-generation", "Mistral", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "synthetic data", "distillation", "function calling", "json mode", "conversational", "en", "dataset:teknium/OpenHermes-2.5", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-03-11T12:55:27.000Z
NousResearch/Hermes-2-Pro-Mistral-7B
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
2
94
14
15
530,374
66dd9c3efd9b7f48c89ca228
amiguel/classItem-FT-llama-3-1-8b-instruct
amiguel
False
not-popular-enough
2024-09-08T12:47:25.000Z
0
false
82d8630451db886a42309d2088f6c5b1d9d3a3e3
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"load_in_4bit": true, "load_in_8bit": false, "quant_method": "bitsandbytes"}, "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", "eos_token": "<|eot_id|>", "pad_token": "<|finetune_right_pad_id|>"}}
16,524
[ "transformers", "safetensors", "llama", "text-generation", "text-generation-inference", "unsloth", "trl", "sft", "conversational", "en", "base_model:unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit", "base_model:quantized:unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "4-bit", "bitsandbytes", "region:us" ]
text-generation
transformers
2024-09-08T12:44:46.000Z
amiguel/classItem-FT-llama-3-1-8b-instruct
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
unsloth/Meta-Llama-3.1-8B-Instruct-bnb-4bit
661
text-generation
661
0
0
0
0
914,454
64b6d9d4bb1d8f2832c8be93
TheBloke/Llama-2-13B-chat-GPTQ
TheBloke
False
explicit-opt-out
2023-09-27T12:44:48.000Z
358
false
ea078917a7e91c896787c73dba935f032ae658e9
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}, "pad_token": null, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false}}}
16,056
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-2", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-13b-chat-hf", "base_model:quantized:meta-llama/Llama-2-13b-chat-hf", "license:llama2", "autotrain_compatible", "text-generation-inference", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2023-07-18T18:28:36.000Z
TheBloke/Llama-2-13B-chat-GPTQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-2-13b-chat-hf
318
text-generation
318
5
0
0
0
247,882
66693891e06cc524750de8f7
QuantFactory/Qwen2-0.5B-GGUF
QuantFactory
False
library-not-detected
2024-06-18T06:31:15.000Z
2
false
2a3c6a4d2fa2077c6a3045867ac18b1933fef5be
null
15,865
[ "gguf", "pretrained", "text-generation", "en", "base_model:Qwen/Qwen2-0.5B", "base_model:quantized:Qwen/Qwen2-0.5B", "license:apache-2.0", "region:us" ]
text-generation
null
2024-06-12T05:56:33.000Z
QuantFactory/Qwen2-0.5B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Qwen2-0.5B.Q2_K.gguf" }, { "rfilename": "Qwen2-0.5B.Q3_K_L.gguf" }, { "rfilename": "Qwen2-0.5B.Q3_K_M.gguf" }, { "rfilename": "Qwen2-0.5B.Q3_K_S.gguf" }, { "rfilename": "Qwen2-0.5B.Q4_0.gguf" }, { "rfilename": "Qwen2-0.5B.Q4_1.gguf" }, { "rfilename": "Qwen2-0.5B.Q4_K_M.gguf" }, { "rfilename": "Qwen2-0.5B.Q4_K_S.gguf" }, { "rfilename": "Qwen2-0.5B.Q5_0.gguf" }, { "rfilename": "Qwen2-0.5B.Q5_1.gguf" }, { "rfilename": "Qwen2-0.5B.Q5_K_M.gguf" }, { "rfilename": "Qwen2-0.5B.Q5_K_S.gguf" }, { "rfilename": "Qwen2-0.5B.Q6_K.gguf" }, { "rfilename": "Qwen2-0.5B.Q8_0.gguf" }, { "rfilename": "README.md" } ]
null
Qwen/Qwen2-0.5B
120
text-generation
120
0
0
0
0
695,554
66a4fbbe07dd6d0ab4a07650
GPT4All-Community/Meta-Llama-3.1-8B-Instruct-128k-GGUF
GPT4All-Community
False
library-not-detected
2024-08-12T23:51:19.000Z
8
false
350b6d7f3a2224c98b6dc8ebdce0e290b71cae22
null
15,300
[ "gguf", "text-generation-inference", "text-generation", "base_model:meta-llama/Llama-3.1-8B-Instruct", "base_model:quantized:meta-llama/Llama-3.1-8B-Instruct", "license:llama3.1", "region:us" ]
text-generation
null
2024-07-27T13:53:02.000Z
GPT4All-Community/Meta-Llama-3.1-8B-Instruct-128k-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Meta-Llama-3.1-8B-Instruct-128k-Q4_0.gguf" }, { "rfilename": "Meta-Llama-3.1-8B-Instruct-128k-f16.gguf" }, { "rfilename": "README.md" } ]
null
meta-llama/Llama-3.1-8B-Instruct
695
text-generation
695
0
0
0
0
781,715
6508c16eabdde5290e606c80
TheBloke/Llama-2-7B-Chat-AWQ
TheBloke
False
explicit-opt-out
2023-11-09T18:21:14.000Z
21
false
ee91097dc818c742d9b648147891dfadd9638ace
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"quant_method": "awq", "bits": 4}, "tokenizer_config": {"bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "pad_token": null, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}}}
14,453
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-2", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-7b-chat-hf", "base_model:quantized:meta-llama/Llama-2-7b-chat-hf", "license:llama2", "autotrain_compatible", "text-generation-inference", "4-bit", "awq", "region:us" ]
text-generation
transformers
2023-09-18T21:30:22.000Z
TheBloke/Llama-2-7B-Chat-AWQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE.txt" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quant_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-2-7b-chat-hf
1,328
text-generation
1,328
0
0
0
0
317,895
65d445c10e77d4f229c98fdc
NousResearch/Nous-Hermes-2-Mistral-7B-DPO-GGUF
NousResearch
False
library-not-detected
2024-02-21T01:35:04.000Z
59
false
eb85cf06e8663157611e8ee472e61b43f50ee49f
null
14,221
[ "gguf", "Mistral", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "synthetic data", "distillation", "en", "dataset:teknium/OpenHermes-2.5", "base_model:mistralai/Mistral-7B-v0.1", "base_model:quantized:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "region:us" ]
null
null
2024-02-20T06:25:05.000Z
NousResearch/Nous-Hermes-2-Mistral-7B-DPO-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q2_K.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q3_K_L.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q3_K_M.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q3_K_S.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q4_0.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q4_K_M.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q4_K_S.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q5_0.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q5_K_M.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q5_K_S.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q6_K.gguf" }, { "rfilename": "Nous-Hermes-2-Mistral-7B-DPO.Q8_0.gguf" }, { "rfilename": "README.md" } ]
null
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
0
0
0
0
500,060
6627253888596f571aedfb4a
lightblue/suzume-llama-3-8B-multilingual
lightblue
False
not-popular-enough
2024-06-02T02:14:24.000Z
103
false
0cb15aa9ec685eef494f9a15f65aefcfe3c04c66
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}", "eos_token": "<|end_of_text|>", "pad_token": "<|end_of_text|>"}}
13,616
[ "transformers", "pytorch", "safetensors", "llama", "text-generation", "generated_from_trainer", "conversational", "arxiv:2405.12612", "base_model:meta-llama/Meta-Llama-3-8B-Instruct", "base_model:finetune:meta-llama/Meta-Llama-3-8B-Instruct", "license:other", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-04-23T03:04:24.000Z
lightblue/suzume-llama-3-8B-multilingual
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "pytorch_model.bin" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Meta-Llama-3-8B-Instruct
1,076
text-generation
1,076
0
3
7
10
600,092
666283eda5e2fb9d6b7336c5
gaianet/Qwen2-0.5B-Instruct-GGUF
gaianet
False
not-popular-enough
2024-06-07T04:09:30.000Z
1
false
bf2944f900c00233d7d1251f4584616cbb06883e
{"architectures": ["Qwen2ForCausalLM"], "model_type": "qwen2"}
13,032
[ "transformers", "gguf", "qwen2", "text-generation", "chat", "en", "base_model:Qwen/Qwen2-0.5B-Instruct", "base_model:quantized:Qwen/Qwen2-0.5B-Instruct", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-06-07T03:52:13.000Z
gaianet/Qwen2-0.5B-Instruct-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Qwen2-0.5B-Instruct-Q2_K.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q3_K_L.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q3_K_M.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q3_K_S.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q4_0.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q4_K_M.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q4_K_S.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q5_0.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q5_K_M.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q5_K_S.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q6_K.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-Q8_0.gguf" }, { "rfilename": "Qwen2-0.5B-Instruct-f16.gguf" }, { "rfilename": "README.md" }, { "rfilename": "config.json" } ]
qwen2
Qwen/Qwen2-0.5B-Instruct
178
text-generation
178
0
0
0
0
686,950
66374df5650f76807b257b98
NousResearch/Hermes-2-Theta-Llama-3-8B
NousResearch
False
not-popular-enough
2024-09-08T08:11:20.000Z
192
false
57a73110702e7b05ba3f39fef36297454c680725
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": [{"name": "default", "template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"}, {"name": "tool_use", "template": "{%- macro json_to_python_type(json_spec) %}\n{%- set basic_type_map = {\n \"string\": \"str\",\n \"number\": \"float\",\n \"integer\": \"int\",\n \"boolean\": \"bool\"\n} %}\n\n{%- if basic_type_map[json_spec.type] is defined %}\n {{- basic_type_map[json_spec.type] }}\n{%- elif json_spec.type == \"array\" %}\n {{- \"list[\" + json_to_python_type(json_spec|items) + \"]\"}}\n{%- elif json_spec.type == \"object\" %}\n {%- if json_spec.additionalProperties is defined %}\n {{- \"dict[str, \" + json_to_python_type(json_spec.additionalProperties) + ']'}}\n {%- else %}\n {{- \"dict\" }}\n {%- endif %}\n{%- elif json_spec.type is iterable %}\n {{- \"Union[\" }}\n {%- for t in json_spec.type %}\n {{- json_to_python_type({\"type\": t}) }}\n {%- if not loop.last %}\n {{- \",\" }} \n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n{%- else %}\n {{- \"Any\" }}\n{%- endif %}\n{%- endmacro %}\n\n\n{{- bos_token }}\n{{- '<|im_start|>system\n' }}\n{{- \"You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools: <tools> \" }}\n{%- for tool in tools %}\n {%- if tool.function is defined %}\n {%- set tool = tool.function %}\n {%- endif %}\n {{- '{\"type\": \"function\", \"function\": ' }}\n {{- '{\"name\": \"' + tool.name + '\", ' }}\n {{- '\"description\": \"' + tool.name + '(' }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {{- param_name + \": \" + json_to_python_type(param_fields) }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- if tool.return is defined %}\n {{- \" -> \" + json_to_python_type(tool.return) }}\n {%- endif %}\n {{- \" - \" + tool.description + \"\n\n\" }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {%- if loop.first %}\n {{- \" Args:\n\" }}\n {%- endif %}\n {{- \" \" + param_name + \"(\" + json_to_python_type(param_fields) + \"): \" + param_fields.description|trim }}\n {%- endfor %}\n {%- if tool.return is defined and tool.return.description is defined %}\n {{- \"\n Returns:\n \" + tool.return.description }}\n {%- endif %}\n {{- '\"' }}\n {{- ', \"parameters\": ' }}\n {%- if tool.parameters.properties | length == 0 %}\n {{- \"{}\" }}\n {%- else %}\n {{- tool.parameters|tojson }}\n {%- endif %}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \"\n\" }}\n {%- endif %}\n{%- endfor %}\n{{- \" </tools>\" }}\n{{- 'Use the following pydantic model json schema for each tool call you will make: {\"properties\": {\"name\": {\"title\": \"Name\", \"type\": \"string\"}, \"arguments\": {\"title\": \"Arguments\", \"type\": \"object\"}}, \"required\": [\"name\", \"arguments\"], \"title\": \"FunctionCall\", \"type\": \"object\"}}\n' }}\n{{- \"For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:\n\" }}\n{{- \"<tool_call>\n\" }}\n{{- '{\"name\": <function-name>, \"arguments\": <args-dict>}\n' }}\n{{- 
'</tool_call><|im_end|>\n' }}\n{%- for message in messages %}\n {%- if message.role == \"user\" or message.role == \"system\" or (message.role == \"assistant\" and message.tool_calls is not defined) %}\n {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- for tool_call in message.tool_calls %}\n {{- '\n<tool_call>\n' }} {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '{' }}\n {{- '\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\"' }}\n {{- ', '}}\n {%- if tool_call.arguments is defined %}\n {{- '\"arguments\": ' }}\n {%- if tool_call.arguments is string %}\n {{- tool_call.arguments }}\n {%- else %}\n {{- tool_call.arguments|tojson }}\n {%- endif %}\n {%- endif %}\n {{- '}' }}\n {{- '\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if loop.previtem and loop.previtem.role != \"tool\" %}\n {{- '<|im_start|>tool\n' }}\n {%- endif %}\n {{- '<tool_response>\n' }}\n {{- message.content }}\n {%- if not loop.last %}\n {{- '\n</tool_response>\n' }}\n {%- else %}\n {{- '\n</tool_response>' }}\n {%- endif %}\n {%- if not loop.last and loop.nextitem.role != \"tool\" %}\n {{- '<|im_end|>' }}\n {%- elif loop.last %}\n {{- '<|im_end|>' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\n' }}\n{%- endif %}\n"}], "eos_token": "<|im_end|>", "pad_token": "<|end_of_text|>"}}
13,026
[ "transformers", "safetensors", "llama", "text-generation", "Llama-3", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "synthetic data", "distillation", "function calling", "json mode", "axolotl", "merges", "conversational", "en", "dataset:teknium/OpenHermes-2.5", "base_model:NousResearch/Hermes-2-Pro-Llama-3-8B", "base_model:finetune:NousResearch/Hermes-2-Pro-Llama-3-8B", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-05-05T09:14:29.000Z
NousResearch/Hermes-2-Theta-Llama-3-8B
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
NousResearch/Hermes-2-Pro-Llama-3-8B
131
text-generation
131
0
23
1
9
621,829
66615487a4c55c16aceb4423
Qwen/Qwen2-1.5B-Instruct-AWQ
Qwen
False
not-popular-enough
2024-08-21T10:35:28.000Z
7
false
688a0bab90cef22c0a5fad44dbda2b65a8859147
{"architectures": ["Qwen2ForCausalLM"], "model_type": "qwen2", "quantization_config": {"bits": 4, "quant_method": "awq"}, "tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "<|endoftext|>", "unk_token": null}}
12,374
[ "transformers", "safetensors", "qwen2", "text-generation", "chat", "conversational", "en", "base_model:Qwen/Qwen2-1.5B-Instruct", "base_model:quantized:Qwen/Qwen2-1.5B-Instruct", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "awq", "region:us" ]
text-generation
transformers
2024-06-06T06:17:43.000Z
Qwen/Qwen2-1.5B-Instruct-AWQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "merges.txt" }, { "rfilename": "model.safetensors" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "vocab.json" } ]
qwen2
Qwen/Qwen2-1.5B-Instruct
495
text-generation
495
0
0
0
0
685,151
663e0c516757087d1abb4ab2
MaziyarPanahi/calme-2.3-phi3-4b
MaziyarPanahi
False
explicit-opt-out
2024-07-20T13:39:11.000Z
9
false
2f0dfbb0426ae75b0106f260658ca155b99a8df5
{"architectures": ["Phi3ForCausalLM"], "auto_map": {"AutoConfig": "microsoft/Phi-3-mini-4k-instruct--configuration_phi3.Phi3Config", "AutoModelForCausalLM": "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM"}, "model_type": "phi3", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "<|endoftext|>", "unk_token": "<unk>", "use_default_system_prompt": false}}
12,004
[ "transformers", "safetensors", "phi3", "text-generation", "axolotl", "finetune", "dpo", "microsoft", "phi", "pytorch", "phi-3", "nlp", "code", "chatml", "conversational", "custom_code", "en", "base_model:microsoft/Phi-3-mini-4k-instruct", "base_model:finetune:microsoft/Phi-3-mini-4k-instruct", "license:mit", "model-index", "autotrain_compatible", "text-generation-inference", "region:us" ]
text-generation
transformers
2024-05-10T12:00:17.000Z
MaziyarPanahi/calme-2.3-phi3-4b
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "phi-3-instruct.webp" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
phi3
microsoft/Phi-3-mini-4k-instruct
470
text-generation
470
0
0
3
3
631,158
66214d85acbb21038cda695a
QuantFactory/Meta-Llama-3-8B-GGUF
QuantFactory
False
library-not-detected
2024-04-20T16:20:22.000Z
107
false
1ca85c857dce892b673b988ad0aa83f2cb1bbd19
null
11,654
[ "gguf", "facebook", "meta", "pytorch", "llama", "llama-3", "text-generation", "en", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:quantized:meta-llama/Meta-Llama-3-8B", "license:other", "region:us" ]
text-generation
null
2024-04-18T16:42:45.000Z
QuantFactory/Meta-Llama-3-8B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE.txt" }, { "rfilename": "Meta-Llama-3-8B.Q2_K.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q3_K_L.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q3_K_M.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q3_K_S.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q4_0.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q4_1.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q4_K_M.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q4_K_S.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q5_0.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q5_1.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q5_K_M.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q5_K_S.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q6_K.gguf" }, { "rfilename": "Meta-Llama-3-8B.Q8_0.gguf" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" } ]
null
meta-llama/Meta-Llama-3-8B
976
text-generation
976
0
0
0
0
592,376
66615424221a94f56bb9032e
Qwen/Qwen2-1.5B-Instruct-GPTQ-Int4
Qwen
False
not-popular-enough
2024-08-21T10:36:01.000Z
3
false
b2e2d7e4336e61901d421296d0a1f418b1a93ae7
{"architectures": ["Qwen2ForCausalLM"], "model_type": "qwen2", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "<|endoftext|>", "unk_token": null}}
11,516
[ "transformers", "safetensors", "qwen2", "text-generation", "chat", "conversational", "en", "base_model:Qwen/Qwen2-1.5B-Instruct", "base_model:quantized:Qwen/Qwen2-1.5B-Instruct", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2024-06-06T06:16:04.000Z
Qwen/Qwen2-1.5B-Instruct-GPTQ-Int4
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "merges.txt" }, { "rfilename": "model.safetensors" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "vocab.json" } ]
qwen2
Qwen/Qwen2-1.5B-Instruct
495
text-generation
495
0
0
0
0
685,145
6639c903c168eec0239c8801
openchat/openchat-3.6-8b-20240522
openchat
False
not-popular-enough
2024-05-28T05:23:57.000Z
145
false
2264eb98558978f708e88ae52afb78e43b832801
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"chat_template": "{{ bos_token }}{% for message in messages %}{% if message['role'] in ['user', 'assistant'] %}{% set content = '<|start_header_id|>GPT4 Correct ' + message['role'].title() + '<|end_header_id|>\n\n' + message['content'] | trim + '<|eot_id|>' %}{% elif message['role'] == 'system' %}{% set content = '<|start_header_id|>System<|end_header_id|>\n\n' + message['content'] | trim + '<|eot_id|>' %}{% else %}{{ raise_exception('Only user, assistant and system roles are supported!') }}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>GPT4 Correct Assistant<|end_header_id|>\n\n' }}{% endif %}", "bos_token": "<|begin_of_text|>", "eos_token": "<|eot_id|>"}}
11,394
[ "transformers", "safetensors", "llama", "text-generation", "openchat", "llama3", "C-RLFT", "conversational", "arxiv:2309.11235", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:finetune:meta-llama/Meta-Llama-3-8B", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-05-07T06:24:03.000Z
openchat/openchat-3.6-8b-20240522
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "openchat.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Meta-Llama-3-8B
976
text-generation
976
1
21
9
9
625,458
65441824276a59f143ebfe1e
TheBloke/OpenHermes-2.5-Mistral-7B-AWQ
TheBloke
False
explicit-opt-out
2023-11-09T18:16:14.000Z
20
false
a0e39e3a0ca6767307293819acb7c38e7a18cd31
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"quant_method": "awq", "bits": 4}, "tokenizer_config": {"bos_token": "<s>", "eos_token": "<|im_end|>", "pad_token": null, "unk_token": "<unk>", "use_default_system_prompt": true}}
11,221
[ "transformers", "safetensors", "mistral", "text-generation", "instruct", "finetune", "chatml", "gpt4", "synthetic data", "distillation", "en", "base_model:teknium/OpenHermes-2.5-Mistral-7B", "base_model:quantized:teknium/OpenHermes-2.5-Mistral-7B", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "4-bit", "awq", "region:us" ]
text-generation
transformers
2023-11-02T21:44:04.000Z
TheBloke/OpenHermes-2.5-Mistral-7B-AWQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quant_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
teknium/OpenHermes-2.5-Mistral-7B
202
text-generation
202
0
0
0
0
365,296
6621ee4f73c423da5948c5bb
astronomer/Llama-3-8B-Instruct-GPTQ-8-Bit
astronomer
False
explicit-opt-out
2024-04-22T01:31:44.000Z
25
false
d7395bd8b015df3f7cb1b2e929d84d3a9f9bc17d
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"bits": 8, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}", "eos_token": "<|end_of_text|>"}}
10,497
[ "transformers", "safetensors", "llama", "text-generation", "llama-3", "facebook", "meta", "astronomer", "gptq", "pretrained", "quantized", "finetuned", "autotrain_compatible", "endpoints_compatible", "conversational", "dataset:wikitext", "arxiv:2210.17323", "base_model:meta-llama/Meta-Llama-3-8B-Instruct", "base_model:quantized:meta-llama/Meta-Llama-3-8B-Instruct", "license:other", "text-generation-inference", "8-bit", "region:us" ]
text-generation
transformers
2024-04-19T04:08:47.000Z
astronomer/Llama-3-8B-Instruct-GPTQ-8-Bit
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE.txt" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Meta-Llama-3-8B-Instruct
1,076
text-generation
1,076
1
0
0
0
593,246
64f611cf6ad07ea81779ca15
TheBloke/Llama-2-13B-chat-GGUF
TheBloke
False
explicit-opt-out
2023-09-27T12:47:12.000Z
190
false
4458acc949de0a9914c3eab623904d4fe999050a
{"model_type": "llama"}
10,488
[ "transformers", "gguf", "llama", "facebook", "meta", "pytorch", "llama-2", "text-generation", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-13b-chat-hf", "base_model:quantized:meta-llama/Llama-2-13b-chat-hf", "license:llama2", "text-generation-inference", "region:us" ]
text-generation
transformers
2023-09-04T17:20:15.000Z
TheBloke/Llama-2-13B-chat-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE.txt" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "llama-2-13b-chat.Q2_K.gguf" }, { "rfilename": "llama-2-13b-chat.Q3_K_L.gguf" }, { "rfilename": "llama-2-13b-chat.Q3_K_M.gguf" }, { "rfilename": "llama-2-13b-chat.Q3_K_S.gguf" }, { "rfilename": "llama-2-13b-chat.Q4_0.gguf" }, { "rfilename": "llama-2-13b-chat.Q4_K_M.gguf" }, { "rfilename": "llama-2-13b-chat.Q4_K_S.gguf" }, { "rfilename": "llama-2-13b-chat.Q5_0.gguf" }, { "rfilename": "llama-2-13b-chat.Q5_K_M.gguf" }, { "rfilename": "llama-2-13b-chat.Q5_K_S.gguf" }, { "rfilename": "llama-2-13b-chat.Q6_K.gguf" }, { "rfilename": "llama-2-13b-chat.Q8_0.gguf" } ]
llama
meta-llama/Llama-2-13b-chat-hf
318
text-generation
318
0
0
0
0
303,231
65e2a24775b43b925e680c3c
ibm/merlinite-7b
ibm
False
not-popular-enough
2024-03-05T14:01:15.000Z
103
false
233d12759d5bb9344231dafdb51310ec19d79c0e
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "eos_token": "<|endoftext|>", "pad_token": "<|pad|>", "unk_token": "<unk>", "use_default_system_prompt": false}}
10,431
[ "transformers", "safetensors", "mistral", "text-generation", "merlinite", "ibm", "lab", "labrador", "labradorite", "en", "arxiv:2403.01081", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-03-02T03:51:35.000Z
ibm/merlinite-7b
[ { "rfilename": ".gitattributes" }, { "rfilename": "Model Card for Merlinite 7b 28cc0b72cf574a4a828140d3539ede4a/Screenshot_2024-02-22_at_11.26.13_AM.png" }, { "rfilename": "Model Card for Merlinite 7b 28cc0b72cf574a4a828140d3539ede4a/Untitled 1.png" }, { "rfilename": "Model Card for Merlinite 7b 28cc0b72cf574a4a828140d3539ede4a/Untitled 2.png" }, { "rfilename": "Model Card for Merlinite 7b 28cc0b72cf574a4a828140d3539ede4a/Untitled.png" }, { "rfilename": "Model Card for Merlinite 7b 28cc0b72cf574a4a828140d3539ede4a/intuition.png" }, { "rfilename": "Model Card for Merlinite 7b 28cc0b72cf574a4a828140d3539ede4a/test.md" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00003.safetensors" }, { "rfilename": "model-00002-of-00003.safetensors" }, { "rfilename": "model-00003-of-00003.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "paper.pdf" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
0
13
0
5
516,102
653a6d1e024b6cd30ff0afa4
HuggingFaceH4/mistral-7b-sft-beta
HuggingFaceH4
False
not-popular-enough
2024-09-24T15:48:08.000Z
23
false
d7b9d226fb91df9de570ec0ee119f46e007f6142
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}", "eos_token": "</s>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": true}}
10,319
[ "transformers", "pytorch", "tensorboard", "mistral", "text-generation", "generated_from_trainer", "conversational", "en", "dataset:HuggingFaceH4/ultrachat_200k", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:mit", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2023-10-26T13:43:58.000Z
HuggingFaceH4/mistral-7b-sft-beta
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "all_results.json" }, { "rfilename": "config.json" }, { "rfilename": "dialogue_template.json" }, { "rfilename": "eval_results.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "pytorch_model-00001-of-00002.bin" }, { "rfilename": "pytorch_model-00002-of-00002.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "runs/Oct20_10-10-24_ip-26-0-147-245/events.out.tfevents.1697796872.ip-26-0-147-245.2400625.0" }, { "rfilename": "runs/Oct20_10-10-24_ip-26-0-147-245/events.out.tfevents.1697803248.ip-26-0-147-245.2400625.1" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "train_results.json" }, { "rfilename": "trainer_state.json" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
14
0
150
1
357,327
66bb66c6f1dfeeafae30af57
DeepMount00/Llama-3.1-8b-Ita
DeepMount00
False
not-popular-enough
2024-08-22T13:46:47.000Z
3
false
5ede1e388b6b15bc06acd364a8f805fe9ed16db9
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}", "eos_token": "<|eot_id|>"}}
10,302
[ "transformers", "safetensors", "llama", "text-generation", "conversational", "en", "it", "base_model:meta-llama/Llama-3.1-8B-Instruct", "base_model:finetune:meta-llama/Llama-3.1-8B-Instruct", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-08-13T13:59:34.000Z
DeepMount00/Llama-3.1-8b-Ita
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-3.1-8B-Instruct
695
text-generation
695
0
0
0
1
819,436
66b05b89a059e1ba1ea78ec1
NousResearch/Hermes-3-Llama-3.1-8B-GGUF
NousResearch
False
library-not-detected
2024-08-23T01:22:33.000Z
76
false
307a5dfb59aa38d88b6cfd32f44b8ad7c1da9fb8
null
10,291
[ "gguf", "Llama-3", "instruct", "finetune", "chatml", "gpt4", "synthetic data", "distillation", "function calling", "json mode", "axolotl", "roleplaying", "chat", "en", "arxiv:2408.11857", "base_model:meta-llama/Llama-3.1-8B", "base_model:quantized:meta-llama/Llama-3.1-8B", "license:llama3", "region:us" ]
null
null
2024-08-05T04:56:41.000Z
NousResearch/Hermes-3-Llama-3.1-8B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Hermes-3-Llama-3.1-8B.Q4_K_M.gguf" }, { "rfilename": "Hermes-3-Llama-3.1-8B.Q5_K_M.gguf" }, { "rfilename": "Hermes-3-Llama-3.1-8B.Q6_K.gguf" }, { "rfilename": "Hermes-3-Llama-3.1-8B.Q8_0.gguf" }, { "rfilename": "README.md" } ]
null
meta-llama/Llama-3.1-8B
254
text-generation
254
0
0
0
0
800,764
652851f32e793f1328fb1657
teknium/OpenHermes-2-Mistral-7B
teknium
False
not-popular-enough
2023-11-02T21:18:17.000Z
255
false
4c6e34123b140ce773a8433cae5410949289102c
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": true}}
10,113
[ "transformers", "pytorch", "mistral", "text-generation", "instruct", "finetune", "chatml", "gpt4", "synthetic data", "distillation", "conversational", "en", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2023-10-12T20:07:15.000Z
teknium/OpenHermes-2-Mistral-7B
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "pytorch_model-00001-of-00002.bin" }, { "rfilename": "pytorch_model-00002-of-00002.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
1
1
4
5
341,614
654033397b2743b58fa41edc
cognitivecomputations/dolphin-2.2.1-mistral-7b
cognitivecomputations
False
not-popular-enough
2024-05-20T14:50:39.000Z
191
false
20f78ab87598cab137c8ce00855464cae403a3fd
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": true}}
10,027
[ "transformers", "pytorch", "safetensors", "mistral", "text-generation", "conversational", "en", "dataset:ehartford/dolphin", "dataset:jondurbin/airoboros-2.2.1", "base_model:mistralai/Mistral-7B-v0.1", "base_model:finetune:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2023-10-30T22:50:33.000Z
cognitivecomputations/dolphin-2.2.1-mistral-7b
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "added_tokens.json" }, { "rfilename": "config.json" }, { "rfilename": "configs/dolphin-mistral-7b.yml" }, { "rfilename": "generation_config.json" }, { "rfilename": "latest" }, { "rfilename": "model-00001-of-00002.safetensors" }, { "rfilename": "model-00002-of-00002.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "pytorch_model-00001-of-00002.bin" }, { "rfilename": "pytorch_model-00002-of-00002.bin" }, { "rfilename": "pytorch_model.bin.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "trainer_state.json" }, { "rfilename": "training_args.bin" }, { "rfilename": "zero_to_fp32.py" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
3
11
1
7
361,934
6699401f2ef5162d0d0ed712
second-state/Mistral-Nemo-Instruct-2407-GGUF
second-state
False
explicit-opt-out
2024-07-24T02:12:13.000Z
63
false
58e0a77adf0ae4e2d4412e415923a8fc008bbe08
{"architectures": ["MistralForCausalLM"], "model_type": "mistral"}
9,912
[ "transformers", "gguf", "mistral", "text-generation", "en", "fr", "de", "es", "it", "pt", "ru", "zh", "ja", "base_model:mistralai/Mistral-Nemo-Instruct-2407", "base_model:quantized:mistralai/Mistral-Nemo-Instruct-2407", "license:apache-2.0", "autotrain_compatible", "text-generation-inference", "region:us" ]
text-generation
transformers
2024-07-18T16:17:35.000Z
second-state/Mistral-Nemo-Instruct-2407-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q2_K.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q3_K_L.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q3_K_M.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q3_K_S.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q4_0.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q4_K_M.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q4_K_S.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q5_0.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q5_K_M.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q5_K_S.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q6_K.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q8_0.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-f16.gguf" }, { "rfilename": "README.md" }, { "rfilename": "config.json" } ]
mistral
mistralai/Mistral-Nemo-Instruct-2407
101
text-generation
101
0
0
0
0
761,615
654418242996405c238f5eea
TheBloke/OpenHermes-2.5-Mistral-7B-GGUF
TheBloke
False
explicit-opt-out
2023-11-02T21:48:38.000Z
227
false
5682e25bb033d9d21f6d159859e21df4552c1f26
{"model_type": "mistral"}
9,817
[ "transformers", "gguf", "mistral", "instruct", "finetune", "chatml", "gpt4", "synthetic data", "distillation", "en", "base_model:teknium/OpenHermes-2.5-Mistral-7B", "base_model:quantized:teknium/OpenHermes-2.5-Mistral-7B", "license:apache-2.0", "text-generation-inference", "region:us" ]
null
transformers
2023-11-02T21:44:04.000Z
TheBloke/OpenHermes-2.5-Mistral-7B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "openhermes-2.5-mistral-7b.Q2_K.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q3_K_L.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q3_K_M.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q3_K_S.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q4_0.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q4_K_M.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q4_K_S.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q5_0.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q5_K_M.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q5_K_S.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q6_K.gguf" }, { "rfilename": "openhermes-2.5-mistral-7b.Q8_0.gguf" } ]
mistral
teknium/OpenHermes-2.5-Mistral-7B
202
text-generation
202
0
0
1
0
365,297
66a3f1c5ee3de8c56ef34fa3
neuralmagic/Meta-Llama-3.1-8B-Instruct-quantized.w4a16
neuralmagic
False
not-popular-enough
2024-09-27T17:20:06.000Z
19
false
28860714d240158c59c8b7c364a658d6d353aa03
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", "eos_token": "<|eot_id|>"}}
9,796
[ "transformers", "safetensors", "llama", "text-generation", "int4", "vllm", "conversational", "en", "de", "fr", "it", "pt", "hi", "es", "th", "base_model:meta-llama/Llama-3.1-8B-Instruct", "base_model:quantized:meta-llama/Llama-3.1-8B-Instruct", "license:llama3.1", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2024-07-26T18:58:13.000Z
neuralmagic/Meta-Llama-3.1-8B-Instruct-quantized.w4a16
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-3.1-8B-Instruct
695
text-generation
695
0
0
0
0
780,117
66aac17986490de8da1eaca9
lmstudio-community/Mistral-Nemo-Instruct-2407-GGUF
lmstudio-community
False
library-not-detected
2024-07-31T23:05:45.000Z
14
false
532c3588dc75bc304efd4b11966b278368817f44
null
9,595
[ "gguf", "text-generation", "en", "fr", "de", "es", "it", "pt", "ru", "zh", "ja", "base_model:mistralai/Mistral-Nemo-Instruct-2407", "base_model:quantized:mistralai/Mistral-Nemo-Instruct-2407", "license:apache-2.0", "region:us" ]
text-generation
null
2024-07-31T22:58:01.000Z
lmstudio-community/Mistral-Nemo-Instruct-2407-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Mistral-Nemo-Instruct-2407-IQ2_M.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-IQ3_M.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-IQ4_XS.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q3_K_L.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q4_K_M.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q5_K_M.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q6_K.gguf" }, { "rfilename": "Mistral-Nemo-Instruct-2407-Q8_0.gguf" }, { "rfilename": "README.md" } ]
null
mistralai/Mistral-Nemo-Instruct-2407
101
text-generation
101
0
0
0
0
791,889
65c3c66ed853e2702e10009c
neuralmagic/zephyr-7b-beta-marlin
neuralmagic
False
not-popular-enough
2024-03-06T02:11:36.000Z
1
false
99a5e49790b8a190ef3dfe2393ee3f447a4f8f82
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"bits": 4, "quant_method": "gptq"}, "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}", "eos_token": "</s>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": true}}
9,375
[ "transformers", "safetensors", "mistral", "text-generation", "nm-vllm", "marlin", "int4", "conversational", "arxiv:2210.17323", "base_model:HuggingFaceH4/zephyr-7b-beta", "base_model:quantized:HuggingFaceH4/zephyr-7b-beta", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "4-bit", "gptq", "region:us" ]
text-generation
transformers
2024-02-07T18:05:34.000Z
neuralmagic/zephyr-7b-beta-marlin
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quantization/apply_gptq_save_marlin.py" }, { "rfilename": "quantization/requirements.txt" }, { "rfilename": "quantize_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
mistral
HuggingFaceH4/zephyr-7b-beta
439
text-generation
439
0
0
0
0
484,327
6508e41ff36bb51c5001973b
TheBloke/Llama-2-13B-chat-AWQ
TheBloke
False
explicit-opt-out
2023-11-09T18:21:10.000Z
26
false
e609cfc317ae4da50bb1839b8dcc3bf3a65f0fb7
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "quantization_config": {"quant_method": "awq", "bits": 4}, "tokenizer_config": {"bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "pad_token": null, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}}}
9,341
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-2", "en", "arxiv:2307.09288", "base_model:meta-llama/Llama-2-13b-chat-hf", "base_model:quantized:meta-llama/Llama-2-13b-chat-hf", "license:llama2", "autotrain_compatible", "text-generation-inference", "4-bit", "awq", "region:us" ]
text-generation
transformers
2023-09-18T23:58:23.000Z
TheBloke/Llama-2-13B-chat-AWQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE.txt" }, { "rfilename": "Notice" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quant_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Llama-2-13b-chat-hf
318
text-generation
318
0
0
0
0
317,974
66d1c810d460a0c31211f913
mowen222/Qwen-Qwen1.5-7B-1725024272
mowen222
False
pipeline-not-detected
2024-08-30T13:24:51.000Z
0
false
a68dfa5c612bc65c4c4b72809daa457ac50330a5
{"tokenizer_config": {"bos_token": null, "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|endoftext|>", "pad_token": "<|endoftext|>", "unk_token": null}, "peft": {"base_model_name_or_path": "Qwen/Qwen1.5-7B", "task_type": "CAUSAL_LM"}}
9,221
[ "peft", "safetensors", "arxiv:1910.09700", "base_model:Qwen/Qwen1.5-7B", "base_model:adapter:Qwen/Qwen1.5-7B", "region:us" ]
null
peft
2024-08-30T13:24:32.000Z
mowen222/Qwen-Qwen1.5-7B-1725024272
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "adapter_config.json" }, { "rfilename": "adapter_model.safetensors" }, { "rfilename": "added_tokens.json" }, { "rfilename": "merges.txt" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "training_args.bin" }, { "rfilename": "vocab.json" } ]
null
Qwen/Qwen1.5-7B
6,515
text-generation
6,515
0
0
0
0
877,815
66252cb1ffe1be5ef3797017
shenzhi-wang/Llama3-8B-Chinese-Chat
shenzhi-wang
False
not-popular-enough
2024-07-04T10:08:56.000Z
634
false
f25f13cb2571e70e285121faceac92926b51e6f5
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{{ '<|begin_of_text|>' }}{% set system_message = 'You are a helpful assistant.' %}{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{% set loop_messages = messages[1:] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ '<|start_header_id|>system<|end_header_id|>\n\n' + system_message | trim + '<|eot_id|>' }}{% endif %}{% for message in loop_messages %}{{ '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}", "eos_token": "<|eot_id|>", "pad_token": "<|eot_id|>"}}
9,187
[ "transformers", "safetensors", "llama", "text-generation", "llama-factory", "orpo", "conversational", "en", "zh", "base_model:meta-llama/Meta-Llama-3-8B-Instruct", "base_model:finetune:meta-llama/Meta-Llama-3-8B-Instruct", "doi:10.57967/hf/2316", "license:llama3", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-04-21T15:11:45.000Z
shenzhi-wang/Llama3-8B-Chinese-Chat
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
meta-llama/Meta-Llama-3-8B-Instruct
1,076
text-generation
1,076
1
6
7
14
597,316
655de1d96a37ab8e1c719a58
peft-internal-testing/gpt2-lora-random
peft-internal-testing
False
pipeline-not-detected
2023-11-22T11:15:49.000Z
0
false
70e66605fa293b69e07d0c20a841c29f55734b1b
{"peft": {"base_model_name_or_path": "gpt2", "task_type": "CAUSAL_LM"}}
9,000
[ "peft", "safetensors", "arxiv:1910.09700", "base_model:openai-community/gpt2", "base_model:adapter:openai-community/gpt2", "region:us" ]
null
peft
2023-11-22T11:11:21.000Z
peft-internal-testing/gpt2-lora-random
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "adapter_config.json" }, { "rfilename": "adapter_model.safetensors" } ]
null
openai-community/gpt2
2,686
text-generation
2,686
0
0
0
0
389,109
668fbcec5cb4c07caad7ceac
rtzr/ko-gemma-2-9b-it
rtzr
False
not-popular-enough
2024-07-15T08:35:00.000Z
62
false
c9aea5c899021d60dc0e0b051b00a504e5d9c7ba
{"architectures": ["Gemma2ForCausalLM"], "model_type": "gemma2", "tokenizer_config": {"bos_token": "<bos>", "chat_template": "{{ '<bos>' }}{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<start_of_turn>user\n' + content + '<end_of_turn>\n<start_of_turn>model\n' }}{% elif message['role'] == 'assistant' %}{{ content + '<end_of_turn>\n' }}{% endif %}{% endfor %}", "eos_token": "<eos>", "pad_token": "<pad>", "unk_token": "<unk>", "use_default_system_prompt": false}}
8,945
[ "transformers", "safetensors", "gemma2", "text-generation", "conversational", "ko", "arxiv:2305.18290", "base_model:google/gemma-2-9b", "base_model:finetune:google/gemma-2-9b", "license:gemma", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-07-11T11:07:24.000Z
rtzr/ko-gemma-2-9b-it
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00010.safetensors" }, { "rfilename": "model-00002-of-00010.safetensors" }, { "rfilename": "model-00003-of-00010.safetensors" }, { "rfilename": "model-00004-of-00010.safetensors" }, { "rfilename": "model-00005-of-00010.safetensors" }, { "rfilename": "model-00006-of-00010.safetensors" }, { "rfilename": "model-00007-of-00010.safetensors" }, { "rfilename": "model-00008-of-00010.safetensors" }, { "rfilename": "model-00009-of-00010.safetensors" }, { "rfilename": "model-00010-of-00010.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" } ]
gemma2
google/gemma-2-9b
105
text-generation
105
9
1
3
5
746,778
653bb81bea8b6b5fe3a67097
TheBloke/zephyr-7B-beta-AWQ
TheBloke
False
explicit-opt-out
2023-11-09T18:16:40.000Z
38
false
ad7867b7a09d5504c86cc015d2d3a73904d488dd
{"architectures": ["MistralForCausalLM"], "model_type": "mistral", "quantization_config": {"quant_method": "awq", "bits": 4}, "tokenizer_config": {"bos_token": "<s>", "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}", "eos_token": "</s>", "pad_token": "</s>", "unk_token": "<unk>", "use_default_system_prompt": true}}
8,430
[ "transformers", "safetensors", "mistral", "text-generation", "generated_from_trainer", "conversational", "en", "dataset:HuggingFaceH4/ultrachat_200k", "dataset:HuggingFaceH4/ultrafeedback_binarized", "arxiv:2305.18290", "arxiv:2310.16944", "base_model:HuggingFaceH4/zephyr-7b-beta", "base_model:quantized:HuggingFaceH4/zephyr-7b-beta", "license:mit", "autotrain_compatible", "text-generation-inference", "4-bit", "awq", "region:us" ]
text-generation
transformers
2023-10-27T13:16:11.000Z
TheBloke/zephyr-7B-beta-AWQ
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "all_results.json" }, { "rfilename": "config.json" }, { "rfilename": "eval_results.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model.safetensors" }, { "rfilename": "quant_config.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer.model" }, { "rfilename": "tokenizer_config.json" }, { "rfilename": "train_results.json" } ]
mistral
HuggingFaceH4/zephyr-7b-beta
439
text-generation
439
0
0
0
0
358,504
664e476c7a51d585f5865987
lmstudio-community/Mistral-7B-Instruct-v0.3-GGUF
lmstudio-community
False
library-not-detected
2024-05-22T19:48:33.000Z
23
false
29a785419661afc70b5cd91b5023a835b0092281
null
8,413
[ "gguf", "text-generation", "base_model:mistralai/Mistral-7B-Instruct-v0.3", "base_model:quantized:mistralai/Mistral-7B-Instruct-v0.3", "license:apache-2.0", "region:us" ]
text-generation
null
2024-05-22T19:28:44.000Z
lmstudio-community/Mistral-7B-Instruct-v0.3-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Mistral-7B-Instruct-v0.3-IQ3_M.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3-IQ4_NL.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3-Q3_K_L.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3-Q4_K_M.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3-Q5_K_M.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3-Q6_K.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3-Q8_0.gguf" }, { "rfilename": "Mistral-7B-Instruct-v0.3-f32.gguf" }, { "rfilename": "README.md" } ]
null
mistralai/Mistral-7B-Instruct-v0.3
320
text-generation
320
0
0
0
0
653,678
66333832ab69b961af56d7be
vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B
vicgalle
False
not-popular-enough
2024-07-31T16:38:50.000Z
5
false
82e7ca13eaa4e5110aee050f5e7076a1571fe604
{"architectures": ["LlamaForCausalLM"], "model_type": "llama", "tokenizer_config": {"bos_token": "<|begin_of_text|>", "chat_template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "eos_token": "<|im_end|>", "pad_token": "<|end_of_text|>"}}
8,248
[ "transformers", "safetensors", "llama", "text-generation", "safety", "conversational", "dataset:vicgalle/configurable-system-prompt-multitask", "arxiv:2404.00495", "base_model:NousResearch/Hermes-2-Pro-Llama-3-8B", "base_model:finetune:NousResearch/Hermes-2-Pro-Llama-3-8B", "license:apache-2.0", "model-index", "autotrain_compatible", "text-generation-inference", "endpoints_compatible", "region:us" ]
text-generation
transformers
2024-05-02T06:52:34.000Z
vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "generation_config.json" }, { "rfilename": "model-00001-of-00004.safetensors" }, { "rfilename": "model-00002-of-00004.safetensors" }, { "rfilename": "model-00003-of-00004.safetensors" }, { "rfilename": "model-00004-of-00004.safetensors" }, { "rfilename": "model.safetensors.index.json" }, { "rfilename": "special_tokens_map.json" }, { "rfilename": "tokenizer.json" }, { "rfilename": "tokenizer_config.json" } ]
llama
NousResearch/Hermes-2-Pro-Llama-3-8B
131
text-generation
131
0
2
0
1
616,275
65145594e31c0e2e3dfabe9a
TheBloke/Mistral-7B-v0.1-GGUF
TheBloke
False
explicit-opt-out
2023-09-28T22:42:44.000Z
241
false
d4ae605152c8de0d6570cf624c083fa57dd0d551
{"model_type": "mistral"}
8,103
[ "transformers", "gguf", "mistral", "pretrained", "text-generation", "base_model:mistralai/Mistral-7B-v0.1", "base_model:quantized:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "text-generation-inference", "region:us" ]
text-generation
transformers
2023-09-27T16:17:24.000Z
TheBloke/Mistral-7B-v0.1-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "README.md" }, { "rfilename": "config.json" }, { "rfilename": "mistral-7b-v0.1.Q2_K.gguf" }, { "rfilename": "mistral-7b-v0.1.Q3_K_L.gguf" }, { "rfilename": "mistral-7b-v0.1.Q3_K_M.gguf" }, { "rfilename": "mistral-7b-v0.1.Q3_K_S.gguf" }, { "rfilename": "mistral-7b-v0.1.Q4_0.gguf" }, { "rfilename": "mistral-7b-v0.1.Q4_K_M.gguf" }, { "rfilename": "mistral-7b-v0.1.Q4_K_S.gguf" }, { "rfilename": "mistral-7b-v0.1.Q5_0.gguf" }, { "rfilename": "mistral-7b-v0.1.Q5_K_M.gguf" }, { "rfilename": "mistral-7b-v0.1.Q5_K_S.gguf" }, { "rfilename": "mistral-7b-v0.1.Q6_K.gguf" }, { "rfilename": "mistral-7b-v0.1.Q8_0.gguf" } ]
mistral
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
0
0
0
0
326,929
663a944ba2da73482bcf5f52
bartowski/JSL-MedLlama-3-8B-v2.0-GGUF
bartowski
False
library-not-detected
2024-05-07T21:07:22.000Z
6
false
b6133e11836b62f531df013224fa45a08256da47
null
8,102
[ "gguf", "llama-3-8b", "sft", "medical", "text-generation", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:quantized:meta-llama/Meta-Llama-3-8B", "license:cc-by-nc-nd-4.0", "region:us" ]
text-generation
null
2024-05-07T20:51:23.000Z
bartowski/JSL-MedLlama-3-8B-v2.0-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ1_M.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ1_S.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ2_M.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ2_S.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ2_XS.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ2_XXS.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ3_M.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ3_S.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ3_XS.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ3_XXS.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ4_NL.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-IQ4_XS.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q2_K.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q3_K_L.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q3_K_M.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q3_K_S.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q4_K_M.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q4_K_S.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q5_K_M.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q5_K_S.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q6_K.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0-Q8_0.gguf" }, { "rfilename": "JSL-MedLlama-3-8B-v2.0.imatrix" }, { "rfilename": "README.md" } ]
null
meta-llama/Meta-Llama-3-8B
976
text-generation
976
0
0
0
0
626,787
65e2a4d900680b19bc7ee382
NousResearch/Hermes-2-Pro-Mistral-7B-GGUF
NousResearch
False
library-not-detected
2024-03-28T20:07:04.000Z
221
false
594e3e33f57a2b8693972e6bf48ae4eff404f170
null
7,673
[ "gguf", "Mistral", "instruct", "finetune", "chatml", "DPO", "RLHF", "gpt4", "synthetic data", "distillation", "function calling", "json mode", "en", "dataset:teknium/OpenHermes-2.5", "base_model:mistralai/Mistral-7B-v0.1", "base_model:quantized:mistralai/Mistral-7B-v0.1", "license:apache-2.0", "region:us" ]
null
null
2024-03-02T04:02:33.000Z
NousResearch/Hermes-2-Pro-Mistral-7B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q2_K.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q3_K_L.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q3_K_M.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q3_K_S.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q4_0.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q4_K_M.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q4_K_S.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q5_0.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q5_K_M.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q5_K_S.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q6_K.gguf" }, { "rfilename": "Hermes-2-Pro-Mistral-7B.Q8_0.gguf" }, { "rfilename": "README.md" } ]
null
mistralai/Mistral-7B-v0.1
2,038
text-generation
2,038
0
0
0
0
516,110
6621815485446629f3a18a8b
lmstudio-community/Meta-Llama-3-8B-Instruct-GGUF
lmstudio-community
False
library-not-detected
2024-05-03T13:53:50.000Z
173
false
0910a3e69201d274d4fd68e89448114cd78e4c82
null
7,231
[ "gguf", "facebook", "meta", "pytorch", "llama", "llama-3", "text-generation", "en", "base_model:meta-llama/Meta-Llama-3-8B-Instruct", "base_model:quantized:meta-llama/Meta-Llama-3-8B-Instruct", "license:llama3", "region:us" ]
text-generation
null
2024-04-18T20:23:48.000Z
lmstudio-community/Meta-Llama-3-8B-Instruct-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "LICENSE" }, { "rfilename": "Meta-Llama-3-8B-Instruct-IQ3_M.gguf" }, { "rfilename": "Meta-Llama-3-8B-Instruct-Q4_K_M.gguf" }, { "rfilename": "Meta-Llama-3-8B-Instruct-Q5_K_M.gguf" }, { "rfilename": "Meta-Llama-3-8B-Instruct-Q6_K.gguf" }, { "rfilename": "Meta-Llama-3-8B-Instruct-Q8_0.gguf" }, { "rfilename": "Meta-Llama-3-8B-Instruct.imatrix" }, { "rfilename": "README.md" }, { "rfilename": "USE_POLICY.md" } ]
null
meta-llama/Meta-Llama-3-8B-Instruct
1,076
text-generation
1,076
0
0
2
0
592,735
6633eb56fa0a333f7af01575
bartowski/Einstein-v6.1-Llama3-8B-GGUF
bartowski
False
library-not-detected
2024-05-02T19:53:53.000Z
6
false
ec0fa319480f2423ac82d90de3ff3ba36e628bde
null
7,156
[ "gguf", "axolotl", "generated_from_trainer", "instruct", "finetune", "chatml", "gpt4", "synthetic data", "science", "physics", "chemistry", "biology", "math", "llama", "llama3", "text-generation", "en", "dataset:allenai/ai2_arc", "dataset:camel-ai/physics", "dataset:camel-ai/chemistry", "dataset:camel-ai/biology", "dataset:camel-ai/math", "dataset:metaeval/reclor", "dataset:openbookqa", "dataset:mandyyyyii/scibench", "dataset:derek-thomas/ScienceQA", "dataset:TIGER-Lab/ScienceEval", "dataset:jondurbin/airoboros-3.2", "dataset:LDJnr/Capybara", "dataset:Cot-Alpaca-GPT4-From-OpenHermes-2.5", "dataset:STEM-AI-mtl/Electrical-engineering", "dataset:knowrohit07/saraswati-stem", "dataset:sablo/oasst2_curated", "dataset:lmsys/lmsys-chat-1m", "dataset:TIGER-Lab/MathInstruct", "dataset:bigbio/med_qa", "dataset:meta-math/MetaMathQA-40K", "dataset:piqa", "dataset:scibench", "dataset:sciq", "dataset:Open-Orca/SlimOrca", "dataset:migtissera/Synthia-v1.3", "dataset:allenai/WildChat", "dataset:microsoft/orca-math-word-problems-200k", "dataset:openchat/openchat_sharegpt4_dataset", "dataset:teknium/GPTeacher-General-Instruct", "dataset:m-a-p/CodeFeedback-Filtered-Instruction", "dataset:totally-not-an-llm/EverythingLM-data-V3", "dataset:HuggingFaceH4/no_robots", "dataset:OpenAssistant/oasst_top1_2023-08-25", "dataset:WizardLM/WizardLM_evol_instruct_70k", "base_model:meta-llama/Meta-Llama-3-8B", "base_model:quantized:meta-llama/Meta-Llama-3-8B", "license:other", "model-index", "region:us" ]
text-generation
null
2024-05-02T19:36:54.000Z
bartowski/Einstein-v6.1-Llama3-8B-GGUF
[ { "rfilename": ".gitattributes" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ1_M.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ1_S.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ2_M.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ2_S.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ2_XS.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ2_XXS.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ3_M.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ3_S.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ3_XS.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ3_XXS.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ4_NL.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-IQ4_XS.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q2_K.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q3_K_L.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q3_K_M.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q3_K_S.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q4_K_M.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q4_K_S.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q5_K_M.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q5_K_S.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q6_K.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B-Q8_0.gguf" }, { "rfilename": "Einstein-v6.1-Llama3-8B.imatrix" }, { "rfilename": "README.md" } ]
null
meta-llama/Meta-Llama-3-8B
976
text-generation
976
0
0
0
0
617,487