Rajut committed on
Commit
1099636
1 Parent(s): 45e4e45

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -31
app.py CHANGED
@@ -1,33 +1,28 @@
1
- import streamlit as st
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
- import warnings
4
 
5
- warnings.simplefilter("ignore")
6
-
7
- def main():
8
- tokenizer = AutoTokenizer.from_pretrained("Unbabel/TowerBase-13B-v0.1")
9
- model = AutoModelForCausalLM.from_pretrained("Unbabel/TowerBase-13B-v0.1", device="cuda" if st.session_state.use_gpu else "cpu", load_in_4bit=True)
10
-
11
- languages = ["English", "Spanish", "Vietnamese", "French", "Portuguese"]
12
-
13
- st.sidebar.title("Translation App")
14
- st.sidebar.write("Choose source and target languages:")
15
-
16
- source_lang_index = st.sidebar.selectbox("Source Language", languages)
17
- target_lang_index = st.sidebar.selectbox("Target Language", languages)
18
-
19
- source_lang = languages.index(source_lang_index)
20
- target_lang = languages.index(target_lang_index)
21
-
22
- text = st.text_area(f"Enter text in {source_lang_index}", "")
23
-
24
- if st.button("Translate"):
25
- input_text = f"{source_lang_index}: {text}\n{target_lang_index}:"
26
- inputs = tokenizer(input_text, return_tensors="pt")
27
- outputs = model.generate(**inputs, max_new_tokens=20)
28
-
29
- translated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
30
- st.write(f"Translation in {target_lang_index}: {translated_text}")
31
-
32
- if __name__ == "__main__":
33
- main()
 
1
+ import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
 
3
 
4
+ # Load tokenizer and model
5
+ tokenizer = AutoTokenizer.from_pretrained("Unbabel/TowerBase-13B-v0.1")
6
+ model = AutoModelForCausalLM.from_pretrained("Unbabel/TowerBase-13B-v0.1", device="cuda" if gr.utils.is_using_gpu() else "cpu", load_in_4bit=True)
7
+
8
+ # Define translation function
9
+ def translate_text(source_lang, target_lang, text):
10
+ input_text = f"{source_lang}: {text}\n{target_lang}:"
11
+ inputs = tokenizer(input_text, return_tensors="pt")
12
+ outputs = model.generate(**inputs, max_length=150)
13
+ translated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
14
+ return translated_text
15
+
16
+ # Define interface
17
+ iface = gr.Interface(
18
+ fn=translate_text,
19
+ inputs=[
20
+ gr.inputs.Dropdown(["English", "Spanish", "Vietnamese", "French", "Portuguese"], label="Source Language"),
21
+ gr.inputs.Dropdown(["English", "Spanish", "Vietnamese", "French", "Portuguese"], label="Target Language"),
22
+ gr.inputs.Textbox(lines=5, label="Input Text")
23
+ ],
24
+ outputs=gr.outputs.Textbox(label="Translated Text")
25
+ )
26
+
27
+ # Run the interface
28
+ iface.launch(share=True)