File size: 1,168 Bytes
a6c26b1
 
 
e18dfac
a6c26b1
e18dfac
a6c26b1
 
 
 
 
 
 
7801fa3
a6c26b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
api: "sncloud"  # set either sambastudio or sncloud

embedding_model:
    "type": "sambastudio"  # set either sambastudio or cpu
    "batch_size": 1  # set depending on your endpoint configuration (1 if CoE embedding expert)
    "coe": false  # set to true if using SambaStudio embeddings in a CoE endpoint
    "select_expert": "e5-mistral-7b-instruct"  # set if using SambaStudio CoE embedding expert

llm:
    "temperature": 0.0
    "do_sample": false
    "max_tokens_to_generate": 1200
    "coe": true  # set to true if using SambaStudio CoE endpoint
    "select_expert": "Meta-Llama-3.1-8B-Instruct"  # set if using sncloud, SambaStudio CoE llm expert
    # sncloud CoE expert name -> "llama3-8b"

retrieval:
    "k_retrieved_documents": 15  # set if rerank enabled
    "score_threshold": 0.2
    "rerank": false  # set to true if you want to rerank retriever results
    "reranker": 'BAAI/bge-reranker-large'  # set if rerank enabled
    "final_k_retrieved_documents": 5

pdf_only_mode: true  # set to true for PDF-only mode, false for all file types
prod_mode: false

prompts: 
    "qa_prompt": "prompts/qa_prompt.yaml"
    "final_chain_prompt": "prompts/final_chain_prompt.yaml"