akhud committed on
Commit
f6f3d5d
1 Parent(s): b210fea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -1
app.py CHANGED
@@ -1,13 +1,18 @@
1
  import streamlit as st
2
  from langchain.prompts import PromptTemplate
3
  from langchain.llms import CTransformers
 
4
 
5
  ## Function To get response from LLAma 2 model
6
 
7
  def getLLamaresponse(input_text,no_words,blog_style):
8
 
 
 
 
 
9
  ### LLama2 model
10
- llm=CTransformers(model='https://huggingface.co/spaces/akhud/llama2-blog-gen/raw/main/llama-2-7b-chat.ggmlv3.q8_0.bin',
11
  model_type='llama',
12
  config={'max_new_tokens':256,
13
  'temperature':0.01})
 
1
  import streamlit as st
2
  from langchain.prompts import PromptTemplate
3
  from langchain.llms import CTransformers
4
+ from transformers import AutoModel
5
 
6
  ## Function To get response from LLAma 2 model
7
 
8
  def getLLamaresponse(input_text,no_words,blog_style):
9
 
10
+ # Load model directly
11
+
12
+ model = AutoModel.from_pretrained("TheBloke/Llama-2-7B-Chat-GGML")
13
+
14
  ### LLama2 model
15
+ llm=CTransformers(model=model,
16
  model_type='llama',
17
  config={'max_new_tokens':256,
18
  'temperature':0.01})