import gradio as gr
from transformers import pipeline

# Load your model
model = pipeline("question-answering", model="DevBM/t5-large-squad")
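# The question-answering pipeline wraps tokenization, the model forward pass, and
# answer-span extraction behind one callable that takes `question` and `context` keywords.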

# Define the prediction function: extractive QA needs both a question and a context passage
def predict(question, context):
    result = model(question=question, context=context)
    return result['answer']

# Define the input and output components (question plus the passage to answer from)
inputs = [
    gr.Textbox(label="Question"),
    gr.Textbox(label="Context", lines=5)
]
outputs = gr.Textbox(label="Answer")

# Define examples that users can select from; each is [question, context],
# with the context left empty here so a passage can be filled in for each question
examples = [
    ["What is the capital of France?", ""],
    ["Who wrote the Harry Potter series?", ""],
    ["What is the largest planet in our solar system?", ""]
]

# Launch the Gradio interface with examples
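# launch() starts a local web server for the demo (by default at http://127.0.0.1:7860)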
gr.Interface(fn=predict, inputs=inputs, outputs=outputs, examples=examples).launch()