##############################################################
#
# AAI-520 Final Project - Team 3
# University of San Diego, Shiley Marcos School of Engineering
# Copyright 2023 Tyler Foreman, Ahmed Ahmed, Tursun Alkman
#
##############################################################
import random
import time

import requests
import gradio as gr
# Hosted Inference API endpoint for the fine-tuned DistilBERT QA model
API_URL = "https://api-inference.huggingface.co/models/t4ai/distilbert-finetuned-t3-qa"
headers = {}


# POST a question/context payload to the Inference API and return the parsed JSON response
def query_model(payload):
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()
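# Illustrative request/response shapes (an assumption based on the hosted QA pipeline,
# not a guarantee of the exact fields returned):
#   query_model({"inputs": {"question": "...", "context": "..."}})
#   -> {"answer": "...", "score": 0.87, "start": 10, "end": 20}
# While the model is still loading, the API may instead return {"error": "..."}.
# If the endpoint requires authentication, a token can be supplied via
# headers = {"Authorization": "Bearer <HF_TOKEN>"} (<HF_TOKEN> is a placeholder).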
# construct UI using Gradio
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):
            context = gr.Textbox(label="Document Text", lines=25)
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(
                label="T3Soft Bot",
                value=[
                    (None, "Welcome! I am your QA assistant."),
                    (None, "Please paste your document content in the panel to the left."),
                    (None, "Then submit questions below!"),
                ],
            )
            msg = gr.Textbox(label="Ask your question")
            clear = gr.ClearButton([msg, chatbot])
    def respond(message, context, chat_history):
        if len(context) == 0:
            bot_message = "Hm, I don't see any document text, please paste it in the box on the left."
        else:
            # Query the hosted model with the user's question and the pasted document
            query_bot = query_model({"inputs": {"question": message, "context": context}})
            # Only surface answers the model is reasonably confident about
            if "answer" in query_bot and query_bot.get("score", 0) > 0.1:
                bot_message = query_bot["answer"]
            else:
                bot_message = random.choice([
                    "I'm having trouble with this question, please try rewording it and make sure it is relevant to the document.",
                    "Hm, I'm having trouble finding the answer to that. Can you reword the question?",
                    "Sorry, I can't find the answer to this question.",
                ])
        chat_history.append((message, bot_message))
        time.sleep(2)
        return "", context, chat_history

    # Wire the textbox submit event: inputs are (question, document, history); outputs
    # clear the question box and update the document box and chat history
    msg.submit(respond, [msg, context, chatbot], [msg, context, chatbot])
demo.launch()
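# A minimal way to try this app locally (assuming Python 3 with the gradio and
# requests packages available; these are the only third-party imports used above):
#   pip install gradio requests
#   python app.py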