ngebodh committed on
Commit
be9d4b9
1 Parent(s): e216102

Updated app.py

Browse files

Cleaned up comments

Files changed (1) hide show
  1. app.py +4 -19
app.py CHANGED
@@ -3,8 +3,8 @@ from openai import OpenAI
3
  import os
4
  import sys
5
  from langchain.callbacks import StreamlitCallbackHandler
6
- from dotenv import load_dotenv, dotenv_values
7
- load_dotenv()
8
 
9
 
10
  if 'key' not in st.session_state:
@@ -49,7 +49,7 @@ st.title(f'ChatBot Using {selected_model}')
49
 
50
  # Set a default model
51
  if selected_model not in st.session_state:
52
- st.session_state[selected_model] = model_links[selected_model] #"google/gemma-7b-it"
53
 
54
  # Initialize chat history
55
  if "messages" not in st.session_state:
@@ -76,24 +76,9 @@ if prompt := st.chat_input("What is up?"):
76
  # Display assistant response in chat message container
77
  with st.chat_message("assistant"):
78
  st_callback = StreamlitCallbackHandler(st.container())
79
- # st_callback =stream_handler
80
-
81
-
82
- # stream = client.completions.create(
83
- # model="google/gemma-7b-it",
84
- # prompt="You are a helpful agent in a question answer exhange. Give you best answer to the questions. {prompt}",
85
- # # messages=[
86
- # # {"role": m["role"], "content": m["content"]}
87
- # # for m in st.session_state.messages
88
- # # ],
89
- # temperature=0.5,
90
- # stream=True,
91
- # max_tokens=3000
92
- # )
93
-
94
 
95
  stream = client.chat.completions.create(
96
- model=model_links[selected_model],#"google/gemma-7b-it",
97
  messages=[
98
  {"role": m["role"], "content": m["content"]}
99
  for m in st.session_state.messages
 
3
  import os
4
  import sys
5
  from langchain.callbacks import StreamlitCallbackHandler
6
+ # from dotenv import load_dotenv, dotenv_values
7
+ # load_dotenv()
8
 
9
 
10
  if 'key' not in st.session_state:
 
49
 
50
  # Set a default model
51
  if selected_model not in st.session_state:
52
+ st.session_state[selected_model] = model_links[selected_model]
53
 
54
  # Initialize chat history
55
  if "messages" not in st.session_state:
 
76
  # Display assistant response in chat message container
77
  with st.chat_message("assistant"):
78
  st_callback = StreamlitCallbackHandler(st.container())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
79
 
80
  stream = client.chat.completions.create(
81
+ model=model_links[selected_model],
82
  messages=[
83
  {"role": m["role"], "content": m["content"]}
84
  for m in st.session_state.messages