import gradio as gr
from transformers import T5Tokenizer, T5ForConditionalGeneration
import torch

# Requires: gradio, transformers, torch and sentencepiece (needed by T5Tokenizer)

# Load the fine-tuned summarisation model
model = T5ForConditionalGeneration.from_pretrained("drelhaj/FinAraT5")
tokenizer = T5Tokenizer.from_pretrained("drelhaj/FinAraT5")  # Assumes spiece.model is present on the Hub

# Define summarisation logic
def summarise(text):
    text = text.strip()
    # The model expects the task prefix "لخص:" ("Summarise:"); add it if missing
    if not text.startswith("لخص:"):
        text = "لخص: " + text
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        outputs = model.generate(**inputs, max_length=64)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# Gradio interface
demo = gr.Interface(
    fn=summarise,
    # Placeholder: "Enter an Arabic financial text..."
    inputs=gr.Textbox(lines=4, placeholder="أدخل نصًا ماليًا باللغة العربية..."),
    outputs="text",
    title="FinAraT5 – Arabic Financial News Summarisation",
    # Description: "This model is used to generate short summaries of Arabic financial
    # news. If the input does not start with 'لخص:', it is added automatically."
    description="يستخدم هذا النموذج لإنشاء ملخصات قصيرة للأخبار المالية العربية. إذا لم يبدأ الإدخال بـ 'لخص:' فسيتم إضافته تلقائيًا."
)

# Launch the app
demo.launch()
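
# --- Optional: quick sanity check without the UI ---
# A minimal sketch, assuming the model weights download successfully; the sample
# headline below is a made-up illustration, not taken from the FinAraT5 data.
# Run it before (or instead of) demo.launch(), since launch() blocks the script:
#
#   sample = "أعلنت الشركة عن ارتفاع أرباحها الفصلية بنسبة 15% مقارنة بالعام الماضي"
#   # "The company announced a 15% rise in quarterly profits compared to last year"
#   print(summarise(sample))  # prints the generated short summary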