import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import warnings

warnings.simplefilter("ignore")


def main():
    # Load tokenizer and model; quantize to 4-bit only when a CUDA GPU is present,
    # since bitsandbytes 4-bit loading requires a GPU.
    use_gpu = torch.cuda.is_available()
    tokenizer = AutoTokenizer.from_pretrained("Unbabel/TowerBase-13B-v0.1")
    model = AutoModelForCausalLM.from_pretrained(
        "Unbabel/TowerBase-13B-v0.1",
        device_map="auto" if use_gpu else None,
        load_in_4bit=use_gpu,
    )

    languages = ["English", "Spanish", "Vietnamese", "French", "Portuguese"]

    st.sidebar.title("Translation App")
    st.sidebar.write("Choose source and target languages:")
    # selectbox returns the selected option itself, not its index
    source_lang = st.sidebar.selectbox("Source Language", languages)
    target_lang = st.sidebar.selectbox("Target Language", languages)

    text = st.text_area(f"Enter text in {source_lang}", "")

    if st.button("Translate"):
        # Prompt format: "<source language>: <text>\n<target language>:"
        input_text = f"{source_lang}: {text}\n{target_lang}:"
        inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
        outputs = model.generate(**inputs, max_new_tokens=256)
        # Decode only the newly generated tokens, skipping the echoed prompt
        translated_text = tokenizer.decode(
            outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True
        )
        st.write(f"Translation in {target_lang}: {translated_text}")


if __name__ == "__main__":
    main()
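
# Optional variant (a sketch, not part of the original app): Streamlit reruns the
# whole script on every interaction, so main() above reloads the 13B model each time.
# Assuming a Streamlit version that provides st.cache_resource (>= 1.18), the loading
# step could instead be cached so tokenizer and model are created only once per process
# and reused across reruns:
#
# @st.cache_resource
# def load_model(model_name: str = "Unbabel/TowerBase-13B-v0.1"):
#     """Load tokenizer and model once; cached by Streamlit across reruns."""
#     tokenizer = AutoTokenizer.from_pretrained(model_name)
#     model = AutoModelForCausalLM.from_pretrained(
#         model_name,
#         device_map="auto" if torch.cuda.is_available() else None,
#         load_in_4bit=torch.cuda.is_available(),
#     )
#     return tokenizer, model
#
# main() would then call `tokenizer, model = load_model()` instead of loading inline.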