import torch
import gradio as gr
# Use a pipeline as a high-level helper
from transformers import pipeline

# model_path = "../Models/models--deepset--roberta-base-squad2/snapshots/adc3b06f79f797d1c575d5479d6f5efe54a9e3b4"
question_answer = pipeline("question-answering", model="deepset/roberta-base-squad2")
# question_answer = pipeline("question-answering", model=model_path)
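# Note: on the first run the pipeline downloads deepset/roberta-base-squad2 from the
# Hugging Face Hub and caches it locally; later runs reuse the cached weights.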

def read_file_content(file_obj):
    """
    Reads the content of an uploaded file and returns it.

    Parameters:
        file_obj: The uploaded file, either a path string or an object with a `.name`
                  attribute (Gradio may pass either, depending on the version).

    Returns:
        str: The content of the file, or an error message if reading fails.
    """
    try:
        # Newer Gradio versions pass a file path string; older ones pass a tempfile wrapper.
        file_path = file_obj if isinstance(file_obj, str) else file_obj.name
        with open(file_path, 'r', encoding='utf-8') as file:
            context = file.read()
        return context
    except Exception as e:
        return f"An error occurred: {e}"

def get_answer(file, question):
    context = read_file_content(file)
    answer = question_answer(question=question, context=context)
    return answer["answer"]
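
# For reference, the question-answering pipeline returns a dict with the keys
# "score", "start", "end", and "answer"; get_answer keeps only the answer text.
# Illustrative example (not real output):
#   {"score": 0.97, "start": 42, "end": 48, "answer": "Gradio"}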

demo = gr.Interface(
    fn=get_answer,
    inputs=[gr.File(label="Upload your file"), gr.Textbox(label="Input your question", lines=1)],
    outputs=[gr.Textbox(label="Answer text", lines=1)],
    title="Project 04: Document QnA",
    description="This application answers your question based on the content of the uploaded document."
)

# Launch the Gradio app; by default this starts a local web server and prints its URL.
demo.launch()