huolongguo10 commited on
Commit
c7a94cb
·
1 Parent(s): 2ad9277

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -2
app.py CHANGED
# app.py — Gradio Space that benchmarks two text-classification checkpoints
# (base and "tiny" variants of huolongguo10/check_sec) on a fixed 1000-example
# slice of the huolongguo10/insecure dataset and displays both metric dicts.
from datasets import load_dataset
from evaluate import evaluator
# NOTE(review): `pipeline` is imported but never used — kept to avoid changing
# the file's import surface; safe to delete if nothing else relies on it.
from transformers import AutoModelForSequenceClassification, pipeline, AutoTokenizer
import gradio as gr

# Fixed-seed shuffle + select(1000) gives a deterministic evaluation subset,
# so both models are scored on exactly the same examples.
data = load_dataset("huolongguo10/insecure", split="train").shuffle(seed=114514).select(range(1000))

task_evaluator = evaluator("text-classification")


def _load_checkpoint(name):
    """Load a sequence-classification model and its tokenizer from the Hub.

    Returns a (model, tokenizer) pair for checkpoint *name*.
    """
    model = AutoModelForSequenceClassification.from_pretrained(name)
    tokenizer = AutoTokenizer.from_pretrained(name)
    return model, tokenizer


def _evaluate(model, tokenizer):
    """Score one (model, tokenizer) pair on the shared `data` subset.

    The label_mapping translates the model's generic LABEL_0/LABEL_1 outputs
    to the dataset's integer labels; input text comes from "sentence1".
    Returns the metric dict produced by `evaluate`.
    """
    return task_evaluator.compute(
        model_or_pipeline=model,
        data=data,
        input_column="sentence1",
        label_mapping={"LABEL_0": 0, "LABEL_1": 1},
        tokenizer=tokenizer,
    )


# Evaluate both checkpoints on the identical data slice.
model, tokenizer = _load_checkpoint("huolongguo10/check_sec")
model_tiny, tokenizer_tiny = _load_checkpoint("huolongguo10/check_sec_tiny")

eval_results = _evaluate(model, tokenizer)
eval_results_tiny = _evaluate(model_tiny, tokenizer_tiny)

# Side-by-side display of both result dicts.
with gr.Blocks() as demo:
    gr.Markdown('# Base:')
    gr.JSON(eval_results)
    gr.Markdown('# Tiny:')
    gr.JSON(eval_results_tiny)

# Log both result sets (original printed only the base model's — fixed for
# consistency with the UI, which shows both).
print(eval_results)
print(eval_results_tiny)
demo.launch()