import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and the 4-bit quantized model (requires accelerate and bitsandbytes)
tokenizer = AutoTokenizer.from_pretrained("Unbabel/TowerBase-13B-v0.1")
model = AutoModelForCausalLM.from_pretrained(
    "Unbabel/TowerBase-13B-v0.1",
    device_map="auto",
    load_in_4bit=True,
)

# Translate text from the source language into the target language
def translate_text(source_lang, target_lang, text):
    # Build the completion-style prompt the base model will continue
    input_text = f"{source_lang}: {text}\n{target_lang}:"
    inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=150)
    # Decode only the newly generated tokens so the prompt is not echoed back
    translated_text = tokenizer.decode(
        outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True
    )
    return translated_text.strip()

# Define the Gradio interface
languages = ["English", "Spanish", "Vietnamese", "French", "Portuguese"]
iface = gr.Interface(
    fn=translate_text,
    inputs=[
        gr.Dropdown(languages, label="Source Language"),
        gr.Dropdown(languages, label="Target Language"),
        gr.Textbox(lines=5, label="Input Text"),
    ],
    outputs=gr.Textbox(label="Translated Text"),
)

# Run the interface
iface.launch(share=True)
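
# Optional sanity check (a minimal sketch, assuming the model above has loaded):
# you can call translate_text directly before launching the UI, e.g. in a Python
# shell or by temporarily placing this line above iface.launch(), which blocks:
#
#     print(translate_text("English", "Portuguese", "The library opens at nine."))
#
# The example sentence is illustrative only; any short input in one of the listed
# languages works the same way.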