import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the fine-tuned model and tokenizer from the Hugging Face Hub
model_name = "Addaci/byt5-small-finetuned-yiddish-experiment-10"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)


def correct_yiddish_transcription(input_text):
    # Tokenize the raw transcription, generate the corrected sequence, and decode it
    inputs = tokenizer(input_text, return_tensors="pt", truncation=True)
    # Raise the generation budget: the default max length is too short for
    # byte-level ByT5 outputs and would truncate the corrected text
    outputs = model.generate(**inputs, max_new_tokens=512)
    corrected_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return corrected_text


# Create the Gradio interface
interface = gr.Interface(
    fn=correct_yiddish_transcription,
    inputs=gr.Textbox(lines=5, label="Input Yiddish Text"),
    outputs=gr.Textbox(label="Corrected Text"),
    title="Yiddish Transcription Correction",
    description="Corrects raw Yiddish machine transcription using a fine-tuned ByT5 model.",
)

interface.launch()
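
# Optional usage sketch: once the app is running, it can also be queried
# programmatically. This assumes the default local address (127.0.0.1:7860)
# and that the gradio_client package is installed; "/predict" is the default
# endpoint name exposed by gr.Interface.
#
# from gradio_client import Client
# client = Client("http://127.0.0.1:7860")
# corrected = client.predict("raw Yiddish transcription here", api_name="/predict")
# print(corrected)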