import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Load the pre-trained model and tokenizer
model_name = "t5-small"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Translate text with a T5-style prompt. T5 expects full language names in the
# prompt (e.g. "translate English to French: ..."), so the ISO codes from the UI
# are mapped through LANG_NAMES below. Note that t5-small was pretrained only on
# English/French/German/Romanian translation, so other pairs will not produce
# reliable output.
def translate_text(text, source_lang, target_lang):
    input_text = f"translate {LANG_NAMES[source_lang]} to {LANG_NAMES[target_lang]}: {text}"
    inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(**inputs, max_new_tokens=256)
    translation = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return translation
# ISO 639-1 codes for the Indian languages offered in the UI
indian_languages = [
    "as", "bn", "gu", "hi", "kn", "ml", "mr", "or", "pa", "ta", "te", "ur"
]

# All language codes offered in the dropdowns
languages = ["en", "fr", "de", "es", "it"] + indian_languages

# Map ISO codes to the full language names used in the T5 prompt
LANG_NAMES = {
    "en": "English", "fr": "French", "de": "German", "es": "Spanish", "it": "Italian",
    "as": "Assamese", "bn": "Bengali", "gu": "Gujarati", "hi": "Hindi", "kn": "Kannada",
    "ml": "Malayalam", "mr": "Marathi", "or": "Odia", "pa": "Punjabi", "ta": "Tamil",
    "te": "Telugu", "ur": "Urdu",
}
# Create the Gradio interface
def translate_interface(text, source_lang, target_lang):
    return translate_text(text, source_lang, target_lang)

iface = gr.Interface(
    fn=translate_interface,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter text to translate"),
        gr.Dropdown(choices=languages, label="Source Language"),
        gr.Dropdown(choices=languages, label="Target Language")
    ],
    outputs="text",
    title="Hugging Face Translation App",
    description="Translate text from one language to another using a T5 model."
)
if __name__ == "__main__":
    iface.launch()
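
If this app is deployed as a Hugging Face Space, it also needs a requirements.txt listing its dependencies. A minimal sketch, assuming a PyTorch backend (the T5 tokenizer additionally requires sentencepiece; exact version pins are omitted):

gradio
transformers
torch
sentencepiece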