# Source: GPT-Docker repository, app/run.py (commit f02eb64, author heikowagner).
# This script inits the models and adds an example collection to the Vectorstore
# %%
import os
import pathlib

from load_model import load_embedding
from utils import get_chroma_client
from load_vectors import load_from_web, create_and_add, load_and_split, metadata_generator

# Directory containing this script, so the key file is found no matter what
# the current working directory is.
current_path = str(pathlib.Path(__file__).parent.resolve())

# Read the OpenAI API key from a local dotfile and export it as an
# environment variable. .strip() removes the trailing newline most editors
# append to the file — an unstripped newline makes the key invalid.
with open(current_path + '/.openaiapikey', 'r', encoding='utf-8') as reader:
    os.environ['OPENAI_API_KEY'] = reader.read().strip()

# Imported AFTER the key is exported — presumably load_model reads
# OPENAI_API_KEY at import time; keep this ordering (TODO confirm).
import load_model
# %%
#load_model.load_gpu_model("decapoda-research/llama-7b-hf") #Download local model
#llm= load_model.load_openai_model()
# %%
#Load example Data
# NOTE(review): client.reset() presumably wipes the whole vector store —
# fine for a from-scratch demo, but confirm before running against a store
# holding data worth keeping.
client = get_chroma_client()
client.reset()
# Instructor embedding model; its name is recorded in the collection
# metadata below so later loads can reconstruct the same embedding function.
ef = load_embedding("hkunlp/instructor-large")
collection_name="papers"
metadata= {"loaded_docs":[], "Subject":"Heikos Papers", "model_name": ef.model_name}
selected_collection = client.create_collection(collection_name, embedding_function=ef, metadata=metadata)
# Example PDFs to ingest. The variable name carries a typo ("tarifs") but is
# referenced further down the script, so it is left unchanged here.
docs_tarifs= [
"https://edoc.hu-berlin.de/bitstream/handle/18452/5294/33.pdf",
"https://arxiv.org/pdf/1702.03556v3.pdf",
"https://arxiv.org/pdf/1706.03762"
]
# %%
# Load collection to get metadata
# Re-fetch the collection that was just created and read the embedding model
# name back out of its stored metadata (instead of reusing the local `ef`),
# mirroring how a fresh process would discover which model to use.
loaded_collection = client.get_collection(collection_name)
model_name = loaded_collection.metadata['model_name']
# %%
docs = load_from_web(docs_tarifs)
sub_docs = load_and_split(docs, chunk_size=1000)
create_and_add(collection_name, sub_docs, model_name, metadata)
# %%
llm= load_model.load_cpu_model()
chain = load_model.create_chain(llm, collection=collection_name, model_name=model_name, metadata=metadata, k=1)
result = chain({"query": "Ist mein Kinderwagen bei einem Leitungswasserschaden mitversichert?"})
print(result)
#llm= load_model.load_openai_model(temperature=0.1)
#llm= load_model.load_cpu_model()
#meta= metadata_generator(docs[0], llm)
# %%
#print(meta)
# %%