ccm committed on
Commit
2249b9c
β€’
1 Parent(s): 0860342

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +3 -11
main.py CHANGED
@@ -15,7 +15,8 @@ import transformers # to load an LLM
15
  GREETING = (
16
  "Howdy! I'm an AI agent that uses a [retrieval-augmented generation]("
17
  "https://en.wikipedia.org/wiki/Retrieval-augmented_generation) pipeline to answer questions about research by the "
18
- "Design Research Collective. And the best part is that I always cite my sources! What can I tell you about today?"
 
19
  )
20
  EXAMPLE_QUERIES = [
21
  "Tell me about new research at the intersection of additive manufacturing and machine learning",
@@ -94,8 +95,7 @@ def search(query: str, k: int) -> tuple[str, str]:
94
  )
95
 
96
  search_results += (
97
- "\nIf these abstract aren't relevant to the following query, please reply 'I am unsure' or "
98
- "similar. Respond to the following query from the perspective of the provided abstracts only:"
99
  )
100
 
101
  return search_results, references
@@ -150,14 +150,6 @@ def reply(message: str, history: list[str]) -> str:
150
  message, bypass = preprocess(message)
151
 
152
  # This is some handling that is applied to the history variable to put it in a good format
153
- # if isinstance(history, list):
154
- # if len(history) > 0:
155
- # history = history[-1]
156
- # print(history)
157
- # history_transformer_format = [
158
- # {"role": "assistant" if idx & 1 else "user", "content": msg}
159
- # for idx, msg in enumerate(history)
160
- # ] + [{"role": "user", "content": message}]
161
  history_transformer_format = [
162
  {"role": role, "content": message_pair[idx]}
163
  for message_pair in history
 
15
  GREETING = (
16
  "Howdy! I'm an AI agent that uses a [retrieval-augmented generation]("
17
  "https://en.wikipedia.org/wiki/Retrieval-augmented_generation) pipeline to answer questions about research by the "
18
+ "[Design Research Collective](https://cmudrc.github.io/). And the best part is that I always cite my sources! What"
19
+ " can I tell you about today?"
20
  )
21
  EXAMPLE_QUERIES = [
22
  "Tell me about new research at the intersection of additive manufacturing and machine learning",
 
95
  )
96
 
97
  search_results += (
98
+ "\nUsing the information provided above, respond to this query: "
 
99
  )
100
 
101
  return search_results, references
 
150
  message, bypass = preprocess(message)
151
 
152
  # This is some handling that is applied to the history variable to put it in a good format
 
 
 
 
 
 
 
 
153
  history_transformer_format = [
154
  {"role": role, "content": message_pair[idx]}
155
  for message_pair in history