poltextlab committed on
Commit 5ef3708 · verified · 1 Parent(s): bdb2bce

exclude domain

Files changed (1)
  1. interfaces/ontolisst.py +2 -6
interfaces/ontolisst.py CHANGED
@@ -12,9 +12,6 @@ HF_TOKEN = os.environ["hf_read"]
12
  languages = [
13
  "English"
14
  ]
15
- domains = {
16
- "parliamentary speech": "parlspeech",
17
- }
18
 
19
  from label_dicts import ONTOLISST_LABEL_NAMES
20
 
@@ -49,7 +46,7 @@ def predict(text, model_id, tokenizer_id):
49
  output_info = f'<p style="text-align: center; display: block">Prediction was made using the <a href="https://huggingface.co/{model_id}">{model_id}</a> model.</p>'
50
  return output_pred, output_info
51
 
52
- def predict_cap(text, language, domain):
53
  model_id = build_huggingface_path(language)
54
  tokenizer_id = "xlm-roberta-large"
55
  return predict(text, model_id, tokenizer_id)
@@ -57,6 +54,5 @@ def predict_cap(text, language, domain):
57
  demo = gr.Interface(
58
  fn=predict_cap,
59
  inputs=[gr.Textbox(lines=6, label="Input"),
60
- gr.Dropdown(languages, label="Language"),
61
- gr.Dropdown(domains.keys(), label="Domain")],
62
  outputs=[gr.Label(num_top_classes=3, label="Output"), gr.Markdown()])
 
12
  languages = [
13
  "English"
14
  ]
 
 
 
15
 
16
  from label_dicts import ONTOLISST_LABEL_NAMES
17
 
 
46
  output_info = f'<p style="text-align: center; display: block">Prediction was made using the <a href="https://huggingface.co/{model_id}">{model_id}</a> model.</p>'
47
  return output_pred, output_info
48
 
49
+ def predict_cap(text, language):
50
  model_id = build_huggingface_path(language)
51
  tokenizer_id = "xlm-roberta-large"
52
  return predict(text, model_id, tokenizer_id)
 
54
  demo = gr.Interface(
55
  fn=predict_cap,
56
  inputs=[gr.Textbox(lines=6, label="Input"),
57
+ gr.Dropdown(languages, label="Language")],
 
58
  outputs=[gr.Label(num_top_classes=3, label="Output"), gr.Markdown()])