File size: 2,395 Bytes
d3c3cdf
a9c4623
d3c3cdf
 
a01a63e
 
 
a9c4623
b1f712d
 
d8fa3f9
 
 
b1f712d
 
 
8ec57c7
b1f712d
a01a63e
 
8ec57c7
a01a63e
 
 
9190e7c
a01a63e
 
 
9190e7c
a01a63e
 
 
9190e7c
d3c3cdf
93e8a0e
092a071
b53e63b
d3c3cdf
b53e63b
99c08ee
b53e63b
 
a01a63e
9190e7c
d3c3cdf
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import gradio as gr
import torch
from transformers import BartForConditionalGeneration, BartTokenizer

# Module-level placeholders; the actual model/tokenizer are loaded on demand
# inside genQuestion for whichever model the user picks.
model = None
tok = None

# One sample input per selectable model: [model name, example context text].
examples = [
    ["interview-question-remake", ""],
    [
        "interview-length-tagged",
        "Hello, I'm Advait Scaria. I'm a senior at Calvin studying Computer Science and Data Science as my minor. I'm currently living with Ha-Ram Koo, Matthias Schmit, and Hyechan Jun",
    ],
    [
        "reverse-interview-question",
        "There are so many incredible musicians out there and so many really compelling big hits this year that it makes for a really interesting way to recap some of those big events.",
    ],
]

# Description text shown in the UI (currently empty).
descriptions = ""

# pass in Strings of model choice and input text for context
def genQuestion(model_choice, context):
    """Generate four candidate interview questions from *context*.

    Parameters
    ----------
    model_choice : str
        One of "interview-question-remake", "interview-length-tagged",
        or "reverse-interview-question" (the dropdown values).
    context : str
        Text used as the conditioning input for generation.

    Returns
    -------
    str
        Four generated sequences, one per line (each line newline-terminated).

    Raises
    ------
    ValueError
        If *model_choice* is not one of the supported model names.
        (Previously an unknown choice fell through and crashed later with a
        confusing error because `model`/`tok` were never assigned.)
    """
    # Map each UI choice to its Hugging Face Hub repo id; replaces the
    # original if/elif chain that triplicated the repo strings.
    repo_ids = {
        "interview-question-remake": "hyechanjun/interview-question-remake",
        "interview-length-tagged": "hyechanjun/interview-length-tagged",
        "reverse-interview-question": "hyechanjun/reverse-interview-question",
    }
    if model_choice not in repo_ids:
        raise ValueError(f"Unknown model choice: {model_choice!r}")
    # NOTE(review): the original also assigned a per-model `descriptions`
    # string here, but it was a function-local that never reached the UI
    # (dead code) — removed; wire it into the interface if it was intended.
    repo = repo_ids[model_choice]
    model = BartForConditionalGeneration.from_pretrained(repo)
    tok = BartTokenizer.from_pretrained(repo)

    inputs = tok(context, return_tensors="pt")
    # Diverse beam search: 4 beams split into 2 groups, returning all 4
    # candidates so the output offers varied phrasings.
    output = model.generate(
        inputs["input_ids"],
        num_beams=4,
        max_length=64,
        min_length=9,
        num_return_sequences=4,
        diversity_penalty=1.0,
        num_beam_groups=2,
    )

    # Decode each sequence exactly once (the original re-decoded all four
    # beams on every loop iteration just to pick one) and join one per line.
    decoded = [
        tok.decode(beam, skip_special_tokens=True, clean_up_tokenization_spaces=False)
        for beam in output
    ]
    return "\n".join(decoded) + "\n"

# Build the Gradio UI: a dropdown selecting one of the three fine-tuned BART
# models plus a free-text context box; the generated questions come back as
# plain text.
# NOTE(review): `gr.inputs.Dropdown` is the legacy pre-3.x Gradio namespace —
# confirm the pinned gradio version; modern releases use `gr.Dropdown`.
# NOTE(review): `descriptions` is evaluated here at import time, when it is
# still the empty string; the per-model description strings assigned inside
# genQuestion are locals and never reach this UI — presumably unintended.
iface = gr.Interface(fn=genQuestion, inputs=[gr.inputs.Dropdown(["interview-question-remake", "interview-length-tagged", "reverse-interview-question"]), "text"], examples=examples, description=descriptions, outputs="text")
iface.launch()