Spaces: Runtime error
yirmibesogluz committed · Commit 5444ef5 · Parent(s): ba22794

Created utils file

Browse files:
- apps/home.py          +3 -16
- apps/summarization.py +2 -15
- apps/utils.py         +23 -0
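In short, this commit factors the duplicated Inference API polling helper out of the individual app pages: both apps/home.py and apps/summarization.py now do "from utils import query" and call the shared helper defined in the new apps/utils.py.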
apps/home.py CHANGED

@@ -3,6 +3,7 @@ import streamlit as st
 import time
 from transformers import pipeline
 import os
+from utils import query
 
 API_URL = "https://api-inference.huggingface.co/models/boun-tabi-LMG/TURNA"
 
@@ -35,22 +36,8 @@ def write():
     #st.title('Turkish Language Generation')
     #st.write('...with Turna')
     input_text = st.text_area(label='Enter a text: ', height=100,
-                              value="Türkiye'nin
+                              value="Türkiye'nin başkenti neresidir?")
     if st.button("Generate"):
         with st.spinner('Generating...'):
-            output = query(input_text)
+            output = query(input_text, API_URL)
             st.success(output)
-
-def query(payload):
-    #{"inputs": payload, ""}
-    while True:
-        response = requests.post(API_URL, json=payload)
-        if 'error' not in response.json():
-            output = response.json()[0]["generated_text"]
-            return output
-        else:
-            time.sleep(15)
-            print('Sending request again', flush=True)
-
-def pipe():
-    pipe = pipeline("text2text-generation", model="boun-tabi-LMG/TURNA", tokenizer="boun-tabi-LMG/TURNA", temperature=0.7, repetition_penalty=0.5, top_p=0.9)
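For context, roughly how the changed portion of apps/home.py reads after this commit, assembled from the hunks above; lines outside the hunks (the rest of write(), the streamlit import, etc.) are elided, and indentation is approximate:

    import time
    from transformers import pipeline
    import os
    from utils import query

    API_URL = "https://api-inference.huggingface.co/models/boun-tabi-LMG/TURNA"

    def write():
        # ... unchanged lines not shown in the diff ...
        input_text = st.text_area(label='Enter a text: ', height=100,
                                  value="Türkiye'nin başkenti neresidir?")
        if st.button("Generate"):
            with st.spinner('Generating...'):
                # query now comes from apps/utils.py and takes the endpoint URL explicitly
                output = query(input_text, API_URL)
                st.success(output)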
apps/summarization.py CHANGED

@@ -3,6 +3,7 @@ import streamlit as st
 import time
 from transformers import pipeline
 import os
+from utils import query
 
 HF_AUTH_TOKEN = os.getenv('HF_AUTH_TOKEN')
 headers = {"Authorization": f"Bearer {HF_AUTH_TOKEN}"}
@@ -75,18 +76,4 @@ def write():
     if st.button("Generate"):
         with st.spinner('Generating...'):
             output = query(input_text, url, params)
-            st.success(output)
-
-
-def query(text, url, params):
-    data = {"inputs": payload, "parameters": params}
-    while True:
-        response = requests.post(url, headers=headers, json=data)
-        if 'error' not in response.json():
-            output = response.json()[0]["generated_text"]
-            return output
-        else:
-            print(response.json())
-            time.sleep(15)
-            print('Sending request again', flush=True)
-
+            st.success(output)
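Worth noting: the inline helper removed here built its request as data = {"inputs": payload, ...} even though the parameter was named text, so it would have raised a NameError when called. The shared query() added in apps/utils.py below uses the text parameter, so the refactor also fixes that bug.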
apps/utils.py ADDED

@@ -0,0 +1,23 @@
+import requests
+import time
+from transformers import pipeline
+import os
+
+HF_AUTH_TOKEN = os.getenv('HF_AUTH_TOKEN')
+headers = {"Authorization": f"Bearer {HF_AUTH_TOKEN}"}
+
+def query(text, url, params=None):
+    data = {"inputs": text, "parameters": params}
+    while True:
+        response = requests.post(url, headers=headers, json=data)
+        if 'error' not in response.json():
+            output = response.json()[0]["generated_text"]
+            return output
+        else:
+            print(response.json())
+            time.sleep(15)
+            print('Sending request again', flush=True)
+
+
+def pipe():
+    pipe = pipeline("text2text-generation", model="boun-tabi-LMG/TURNA", tokenizer="boun-tabi-LMG/TURNA", temperature=0.7, repetition_penalty=0.5, top_p=0.9)
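A brief usage note (not part of the commit): query() posts {"inputs": text, "parameters": params} to the given Inference API endpoint and, while the response still contains an "error" key (typically while the model is loading), waits 15 seconds and retries. The two call patterns introduced in this commit look like:

    # as called from apps/home.py: no generation parameters, so params stays None
    output = query(input_text, API_URL)

    # as called from apps/summarization.py: task-specific endpoint plus a params dict
    output = query(input_text, url, params)

Also note that pipe(), as committed, constructs a text2text-generation pipeline but never returns or uses it. A local-inference variant would presumably need to return the pipeline's output; a minimal sketch (an assumption, not part of the commit, taking a single text argument):

    def pipe(text):
        generator = pipeline("text2text-generation", model="boun-tabi-LMG/TURNA",
                             tokenizer="boun-tabi-LMG/TURNA",
                             temperature=0.7, repetition_penalty=0.5, top_p=0.9)
        # text2text-generation pipelines return a list of dicts with "generated_text"
        return generator(text)[0]["generated_text"]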