# template used in production for HuggingChat.
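# MODELS is a backtick-quoted JSON array: each entry names a model on the Hugging Face Hub and tells
# the app how to prompt it. "chatPromptTemplate" is a Handlebars-style template over {{preprompt}} and
# {{#each messages}} (with {{#ifUser}} / {{#ifAssistant}} helpers), and "parameters" are generation
# settings (temperature, top_p, max_new_tokens, ...) forwarded to the text-generation backend.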
MODELS=`[
  {
    "name": "meta-llama/Llama-2-70b-chat-hf",
    "description": "The latest and biggest model from Meta, fine-tuned for chat.",
    "websiteUrl": "https://ai.meta.com/llama/",
    "userMessageToken": "",
    "userMessageEndToken": " [/INST] ",
    "assistantMessageToken": "",
    "assistantMessageEndToken": " </s><s>[INST] ",
    "preprompt": " ",
    "chatPromptTemplate": "<s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}",
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ],
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 1024
    }
  },
  {
    "name": "codellama/CodeLlama-34b-Instruct-hf",
    "displayName": "codellama/CodeLlama-34b-Instruct-hf",
    "description": "Code Llama, a state-of-the-art code model from Meta.",
    "websiteUrl": "https://about.fb.com/news/2023/08/code-llama-ai-for-coding/",
    "userMessageToken": "",
    "userMessageEndToken": " [/INST] ",
    "assistantMessageToken": "",
    "assistantMessageEndToken": " </s><s>[INST] ",
    "preprompt": " ",
    "chatPromptTemplate": "<s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}",
    "promptExamples": [
      {
        "title": "Fibonacci in Python",
        "prompt": "Write a python function to calculate the nth fibonacci number."
      }, {
        "title": "JavaScript promises",
        "prompt": "How can I wait for multiple JavaScript promises to fulfill before doing something with their values?"
      }, {
        "title": "Rust filesystem",
        "prompt": "How can I load a file from disk in Rust?"
      }
    ],
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 2048
    }
  },
  {
    "name": "tiiuae/falcon-180B-chat",
    "displayName": "tiiuae/falcon-180B-chat",
    "description": "Falcon-180B is a 180B-parameter causal decoder-only model built by TII and trained on 3,500B tokens.",
    "websiteUrl": "https://www.tii.ae/news/technology-innovation-institute-introduces-worlds-most-powerful-open-llm-falcon-180b",
    "preprompt": " ",
    "chatPromptTemplate": "System: {{preprompt}}\nUser:{{#each messages}}{{#ifUser}}{{content}}\nFalcon:{{/ifUser}}{{#ifAssistant}}{{content}}\nUser:{{/ifAssistant}}{{/each}}",
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 1024,
      "stop": ["User:"]
    },
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ]
  },
  {
    "name": "mistralai/Mistral-7B-Instruct-v0.1",
    "displayName": "mistralai/Mistral-7B-Instruct-v0.1",
    "description": "Mistral 7B is a new Apache 2.0 model released by Mistral AI that outperforms Llama 2 13B on benchmarks.",
    "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
    "preprompt": "",
    "chatPromptTemplate": "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 2048,
      "stop": ["</s>"]
    },
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ]
  }
]`
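# Illustrative only (not read by the app): with a hypothetical preprompt "You are a helpful assistant."
# and the messages [user "Hello", assistant "Hi there!", user "How are you?"], the Llama 2 / Code Llama
# chatPromptTemplate above would render as (note the trailing space after the final [/INST]):
#   <s>[INST] <<SYS>>
#   You are a helpful assistant.
#   <</SYS>>
#
#   Hello [/INST] Hi there! </s><s>[INST] How are you? [/INST]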
OLD_MODELS=`[{"name":"bigcode/starcoder"}, {"name":"OpenAssistant/oasst-sft-6-llama-30b-xor"}]`
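# OLD_MODELS lists models that have been retired from MODELS; keeping them here lets existing
# conversations that used them still be displayed, while they are no longer offered for new chats.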
TASK_MODEL='mistralai/Mistral-7B-Instruct-v0.1'
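# TASK_MODEL is the model used for internal tasks (such as summarizing a conversation into a title
# or generating web-search queries) rather than for the chat responses themselves.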
APP_BASE="/chat"
PUBLIC_ORIGIN=https://huggingface.co
PUBLIC_SHARE_PREFIX=https://hf.co/chat
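# With these values the app is served under https://huggingface.co/chat, while links to shared
# conversations use the shorter https://hf.co/chat prefix.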
PUBLIC_ANNOUNCEMENT_BANNERS=`[
  {
    "title": "Websearch 2.0, now with RAG & sources",
    "linkTitle": "Discuss",
    "linkHref": "https://huggingface.co/spaces/huggingchat/chat-ui/discussions/254"
  }
]`
PUBLIC_APP_NAME=HuggingChat
PUBLIC_APP_ASSETS=huggingchat
PUBLIC_APP_COLOR=yellow
PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
PUBLIC_APP_DATA_SHARING=1
PUBLIC_APP_DISCLAIMER=1
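# PUBLIC_APP_DATA_SHARING and PUBLIC_APP_DISCLAIMER appear to be 0/1 flags: the former enables the
# data-sharing opt-in in user settings, the latter shows the usage disclaimer in the welcome modal.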
RATE_LIMIT=16
# How many messages a user can send in a conversation before having to log in. Set to 0 to force login right away.
MESSAGES_BEFORE_LOGIN=1
# The following are not part of this .env file, but are set as additional variables in the Space:
# ADDRESS_HEADER=X-Forwarded-For
# XFF_DEPTH=2
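# (ADDRESS_HEADER / XFF_DEPTH presumably tell the app which header, and which entry within
# X-Forwarded-For, to trust for the client IP when running behind the Space's proxies, e.g. for rate limiting.)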