DevBM's picture
Update app.py
c1b9e99 verified
raw
history blame
828 Bytes
import gradio as gr
from transformers import pipeline
# Load the QA model once at import time (downloads weights on first run).
# NOTE(review): "DevBM/t5-large-squad" is a T5 seq2seq checkpoint; the
# "question-answering" pipeline normally expects an extractive (encoder) model —
# confirm this pipeline task actually works with this checkpoint.
model = pipeline("question-answering", model="DevBM/t5-large-squad")
# Define the prediction function
def predict(question, context="Your context here"):
    """Answer *question* against *context* using the QA pipeline.

    Args:
        question: Natural-language question to answer.
        context: Passage the answer should be extracted from. Defaults to the
            original placeholder so existing single-argument callers (the
            Gradio interface) keep working; pass real text for useful answers.

    Returns:
        The answer string from the pipeline result dict.
    """
    # The pipeline returns a dict like {'answer': ..., 'score': ..., ...};
    # only the answer text is surfaced to the UI.
    result = model(question=question, context=context)
    return result['answer']
# Define the input and output interfaces
# Define the input and output components for the web UI.
inputs = gr.Textbox(label="Question")
outputs = gr.Textbox(label="Answer")

# Clickable example questions. Each example must have exactly as many values
# as there are input components (one Textbox here); the original two-element
# entries ([question, ""]) did not match the single input and would make
# Gradio reject the examples.
examples = [
    ["What is the capital of France?"],
    ["Who wrote the Harry Potter series?"],
    ["What is the largest planet in our solar system?"],
]

# Build and launch the Gradio interface (blocking call; serves the app).
gr.Interface(fn=predict, inputs=inputs, outputs=outputs, examples=examples).launch()