haramkoo committed
Commit b1f712d · 1 Parent(s): 0d9ea65

model descriptions and examples draft01

Files changed (1)
app.py +15 -1
app.py CHANGED
@@ -6,17 +6,31 @@ from transformers import BartForConditionalGeneration, BartTokenizer
 model = None
 tok = None
 
+# Examples for each model
+examples = [
+    ["interview-question-remake", ""],
+    ["interview-length-tagged", ""],
+    ["reverse-interview-question", ""]
+]
+
+# Descriptions for each model
+
+
 # pass in Strings of model choice and input text for context
 def genQuestion(model_choice, context):
+    global description
     if model_choice=="interview-question-remake":
         model = BartForConditionalGeneration.from_pretrained("hyechanjun/interview-question-remake")
         tok = BartTokenizer.from_pretrained("hyechanjun/interview-question-remake")
+        description = "Interview question remake is a model that..."
     elif model_choice=="interview-length-tagged":
         model = BartForConditionalGeneration.from_pretrained("hyechanjun/interview-length-tagged")
         tok = BartTokenizer.from_pretrained("hyechanjun/interview-length-tagged")
+        description = "Interview question tagged is a model that..."
     elif model_choice=="reverse-interview-question":
         model = BartForConditionalGeneration.from_pretrained("hyechanjun/reverse-interview-question")
         tok = BartTokenizer.from_pretrained("hyechanjun/reverse-interview-question")
+        description = "Reverse interview question is a model that..."
 
     inputs = tok(context, return_tensors="pt")
     output = model.generate(inputs["input_ids"], num_beams=4, max_length=64, min_length=9, num_return_sequences=4, diversity_penalty=1.0, num_beam_groups=2)
@@ -27,5 +41,5 @@ def genQuestion(model_choice, context):
 
     return final_output
 
-iface = gr.Interface(fn=genQuestion, inputs=[gr.inputs.Dropdown(["interview-question-remake", "interview-length-tagged", "reverse-interview-question"]), "text"], outputs="text")
+iface = gr.Interface(fn=genQuestion, inputs=[gr.inputs.Dropdown(["interview-question-remake", "interview-length-tagged", "reverse-interview-question"]), "text"], examples=examples, description=description, outputs="text")
 iface.launch()
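
A note on the new description wiring: description is assigned only inside genQuestion (under global description), but it is read when gr.Interface(..., description=description, ...) is constructed at startup, before the function has ever run, and the "# Descriptions for each model" section defines no module-level default. Below is a minimal sketch, not part of this commit, of one way to define the examples and per-model descriptions up front; it assumes the same Gradio and Transformers versions as app.py, and the DESCRIPTIONS dict, MODEL_NAMES list, and placeholder strings are illustrative, not the author's code.

# Sketch only -- not part of commit b1f712d. Assumes gradio and transformers are
# installed; DESCRIPTIONS, MODEL_NAMES, and the placeholder strings are illustrative.
import gradio as gr
from transformers import BartForConditionalGeneration, BartTokenizer

MODEL_NAMES = ["interview-question-remake", "interview-length-tagged", "reverse-interview-question"]

# Per-model descriptions defined at module level, so nothing is undefined at build time.
DESCRIPTIONS = {
    "interview-question-remake": "Interview question remake is a model that...",
    "interview-length-tagged": "Interview length tagged is a model that...",
    "reverse-interview-question": "Reverse interview question is a model that...",
}

def genQuestion(model_choice, context):
    # The dropdown choices match the checkpoint names under the hyechanjun namespace.
    model = BartForConditionalGeneration.from_pretrained(f"hyechanjun/{model_choice}")
    tok = BartTokenizer.from_pretrained(f"hyechanjun/{model_choice}")
    inputs = tok(context, return_tensors="pt")
    output = model.generate(inputs["input_ids"], num_beams=4, max_length=64, min_length=9,
                            num_return_sequences=4, diversity_penalty=1.0, num_beam_groups=2)
    questions = [tok.decode(seq, skip_special_tokens=True) for seq in output]
    # Prepend the chosen model's description so it changes with the dropdown selection.
    return DESCRIPTIONS[model_choice] + "\n\n" + "\n".join(questions)

iface = gr.Interface(
    fn=genQuestion,
    inputs=[gr.inputs.Dropdown(MODEL_NAMES), "text"],
    outputs="text",
    examples=[[name, ""] for name in MODEL_NAMES],
    description="Pick a model and enter a context to generate interview questions.",
)
iface.launch()

Since the description passed to gr.Interface is fixed when the page is rendered, this sketch instead returns the selected model's description alongside the generated questions, which keeps the text in sync with the dropdown choice.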