zahraghamari committed
Commit 2da898d
1 Parent(s): 7936791

Update .gitattributes and README.md

Files changed (3):
  1. .gitattributes +1 -0
  2. README.md +0 -10
  3. app.py +1 -1
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ Dindex.faiss filter=lfs diff=lfs merge=lfs -text
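The newly LFS-tracked Dindex.faiss is a binary FAISS index, which is why it needs the `filter=lfs` attribute. A minimal sketch of how such an index might be loaded and queried at app startup (the `read_index` call and the query shape are assumptions, not taken from this commit):

```python
import faiss          # pip install faiss-cpu
import numpy as np

# Assumption: the Space loads the LFS-tracked index from the repo root.
index = faiss.read_index("Dindex.faiss")

# Assumption: query embeddings share the index dimensionality (index.d).
query_vector = np.random.rand(1, index.d).astype("float32")
distances, ids = index.search(query_vector, 5)  # top-5 nearest passages
print(ids)
```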
README.md CHANGED
@@ -1,13 +1,3 @@
  ---
- title: QA
- emoji: 🏢
- colorFrom: red
- colorTo: pink
- sdk: streamlit
- sdk_version: 1.34.0
- app_file: app.py
- pinned: false
  license: apache-2.0
  ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py CHANGED
@@ -56,7 +56,7 @@ def answer_by_llm(question, context):
  # Set up Groq client
  GPT_MODEL = "llama3-70b-8192"
  OPENAI_TOKEN = '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'
- client = Groq(api_key=OPENAI_TOKEN, verify_ssl=False)
+ client = Groq(api_key=OPENAI_TOKEN)

  # Define prompt template
  prompt_template = """Use the following context to answer the medical question at the end.