Mixtral 8x22B
Browse files
app.py
CHANGED
@@ -15,7 +15,7 @@ from langchain_text_splitters import RecursiveCharacterTextSplitter
|
|
15 |
|
16 |
# Initialize the LLMs
|
17 |
llm = Together(
|
18 |
-
model="mistralai/Mixtral-
|
19 |
temperature=0.2,
|
20 |
top_k=12,
|
21 |
max_tokens=22048,
|
@@ -87,7 +87,7 @@ def app():
|
|
87 |
)
|
88 |
|
89 |
system_prompt = (
|
90 |
-
"You are
|
91 |
"Use the following pieces of retrieved context to answer "
|
92 |
"the question. If you don't know the answer, say that you "
|
93 |
"don't know."
|
@@ -117,7 +117,7 @@ def app():
|
|
117 |
if "messages" not in st.session_state.keys():
|
118 |
st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]
|
119 |
|
120 |
-
st.header("
|
121 |
for message in st.session_state.messages:
|
122 |
with st.chat_message(message["role"]):
|
123 |
st.write(message["content"])
|
|
|
15 |
|
16 |
# Initialize the LLMs
|
17 |
llm = Together(
|
18 |
+
model="mistralai/Mixtral-8x22B-Instruct-v0.1",
|
19 |
temperature=0.2,
|
20 |
top_k=12,
|
21 |
max_tokens=22048,
|
|
|
87 |
)
|
88 |
|
89 |
system_prompt = (
|
90 |
+
"You are helping a doctor. Be as detailed and thorough as possible "
|
91 |
"Use the following pieces of retrieved context to answer "
|
92 |
"the question. If you don't know the answer, say that you "
|
93 |
"don't know."
|
|
|
117 |
if "messages" not in st.session_state.keys():
|
118 |
st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]
|
119 |
|
120 |
+
st.header("Hello Doc!")
|
121 |
for message in st.session_state.messages:
|
122 |
with st.chat_message(message["role"]):
|
123 |
st.write(message["content"])
|