charanhu committed
Commit
5b8315e
1 Parent(s): 00d886f

Create app.py

Files changed (1)
  1. app.py +17 -0
app.py ADDED
@@ -0,0 +1,17 @@
+ import torch
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+
+ tokenizer = AutoTokenizer.from_pretrained("upstage/SOLAR-10.7B-Instruct-v1.0")
+ model = AutoModelForCausalLM.from_pretrained("upstage/SOLAR-10.7B-Instruct-v1.0")
+
+ def generate_response(prompt):
+     conversation = [{'role': 'user', 'content': prompt}]
+     prompt = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
+     inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+     outputs = model.generate(**inputs, use_cache=True, max_length=4096)
+     outputs_text = tokenizer.decode(outputs[0])
+     return outputs_text
+
+ iface = gr.Interface(fn=generate_response, inputs="text", outputs="text")
+ iface.launch()
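
Note: as written, generate_response decodes the full output sequence, so the returned text repeats the prompt and keeps special tokens, and max_length=4096 counts prompt tokens toward the cap. A minimal sketch of a variant that returns only the completion (the function name and the max_new_tokens value are illustrative, not part of this commit):

    def generate_completion_only(prompt):
        conversation = [{'role': 'user', 'content': prompt}]
        text = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
        inputs = tokenizer(text, return_tensors="pt").to(model.device)
        # max_new_tokens caps only the generated tokens, not prompt + completion.
        outputs = model.generate(**inputs, use_cache=True, max_new_tokens=1024)
        # Slice off the prompt tokens and drop special tokens before decoding.
        new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
        return tokenizer.decode(new_tokens, skip_special_tokens=True)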