File size: 1,245 Bytes
48cebb5
b3f9249
 
48cebb5
b3f9249
 
dd9e773
b3f9249
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
import gradio as gr
import pandas as pd
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# OmniTab is a BART-based seq2seq table-QA model, so it is loaded with the
# seq2seq auto classes (AutoModelForTableQuestionAnswering targets TAPAS-style
# models and rejects a BART config). No API key is needed for a public model.
model_name = "neulab/omnitab-large-finetuned-wtq"
tokenizer = AutoTokenizer.from_pretrained(model_name)  # TAPEX-style table tokenizer
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

def answer_question(csv_file, question):
    """Answer a natural-language question about an uploaded CSV table.

    Args:
        csv_file: Uploaded file object from Gradio; ``csv_file.name`` is the
            path of the temp file on disk.
        question: Natural-language question about the table contents.

    Returns:
        The model's answer as a stripped string.
    """
    # Read the CSV into a DataFrame. The TAPEX-style tokenizer requires every
    # cell to be a string, so cast explicitly.
    table = pd.read_csv(csv_file.name).astype(str)

    # Encode table and question together in one call — the tokenizer
    # linearizes the table and concatenates the query itself.
    encoding = tokenizer(table=table, query=question, return_tensors="pt")

    # Generate the answer autoregressively, then decode the generated token
    # ids back to text (decoding raw logits is not meaningful).
    outputs = model.generate(**encoding)
    answers = tokenizer.batch_decode(outputs, skip_special_tokens=True)

    return answers[0].strip()

# Build the Gradio UI. The gr.inputs / gr.outputs namespaces were deprecated
# and removed in modern Gradio releases; components are used directly.
demo = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.File(label="CSV File"),
        gr.Textbox(lines=2, label="Question"),
    ],
    outputs=gr.Textbox(label="Answer"),
    title="Table Question Answering",
    description="Upload a CSV file and ask a question about the data.",
)

# Launch only when executed as a script, so importing this module
# (e.g. for testing) does not start a web server.
if __name__ == "__main__":
    demo.launch()