Pragformer committed on
Commit
83fdac1
·
1 Parent(s): f902090

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -33
app.py CHANGED
@@ -10,7 +10,6 @@ pragformer_reduction = transformers.AutoModel.from_pretrained("Pragformer/PragFo
10
 
11
 
12
  #Event Listeners
13
-
14
  tokenizer = transformers.AutoTokenizer.from_pretrained('NTUYG/DeepSCC-RoBERTa')
15
 
16
  with open('c_data.json', 'r') as f:
@@ -35,37 +34,43 @@ def predict(code_txt):
35
 
36
 
37
  def is_private(code_txt):
38
- code = code_txt.lstrip().rstrip()
39
- tokenized = tokenizer.batch_encode_plus(
40
- [code],
41
- max_length = 150,
42
- pad_to_max_length = True,
43
- truncation = True
44
- )
45
- pred = pragformer_private(torch.tensor(tokenized['input_ids']), torch.tensor(tokenized['attention_mask']))
46
-
47
- y_hat = torch.argmax(pred).item()
48
- if y_hat == 0:
49
- return gr.update(visible=False)
50
- else:
51
- return gr.update(value=f"Confidence: {torch.nn.Softmax(dim=1)(pred).squeeze()[y_hat].item()}", visible=True)
52
-
53
-
54
- def is_reduction(code_txt):
55
- code = code_txt.lstrip().rstrip()
56
- tokenized = tokenizer.batch_encode_plus(
57
- [code],
58
- max_length = 150,
59
- pad_to_max_length = True,
60
- truncation = True
61
- )
62
- pred = pragformer_reduction(torch.tensor(tokenized['input_ids']), torch.tensor(tokenized['attention_mask']))
63
-
64
- y_hat = torch.argmax(pred).item()
65
- if y_hat == 0:
66
- return gr.update(visible=False)
67
- else:
68
- return gr.update(value=f"Confidence: {torch.nn.Softmax(dim=1)(pred).squeeze()[y_hat].item()}", visible=True)
 
 
 
 
 
 
69
 
70
 
71
  # Define GUI
@@ -102,7 +107,7 @@ with gr.Blocks() as pragformer_gui:
102
  gr.Markdown("## Input")
103
  with gr.Row():
104
  with gr.Column():
105
- drop = gr.Dropdown(list(data.keys()), label="Random Code Snippet")
106
  sample_btn = gr.Button("Sample")
107
 
108
  pragma = gr.Textbox(label="Pragma")
 
10
 
11
 
12
  #Event Listeners
 
13
  tokenizer = transformers.AutoTokenizer.from_pretrained('NTUYG/DeepSCC-RoBERTa')
14
 
15
  with open('c_data.json', 'r') as f:
 
34
 
35
 
36
  def is_private(code_txt):
37
+ if predict(code_txt)[0] == 'Without OpenMP':
38
+ return gr.update(visible=False)
39
+
40
+ code = code_txt.lstrip().rstrip()
41
+ tokenized = tokenizer.batch_encode_plus(
42
+ [code],
43
+ max_length = 150,
44
+ pad_to_max_length = True,
45
+ truncation = True
46
+ )
47
+ pred = pragformer_private(torch.tensor(tokenized['input_ids']), torch.tensor(tokenized['attention_mask']))
48
+
49
+ y_hat = torch.argmax(pred).item()
50
+ if y_hat == 0:
51
+ return gr.update(visible=False)
52
+ else:
53
+ return gr.update(value=f"Confidence: {torch.nn.Softmax(dim=1)(pred).squeeze()[y_hat].item()}", visible=True)
54
+
55
+
56
+ def is_reduction(code_txt, label):
57
+ if predict(code_txt)[0] == 'Without OpenMP':
58
+ return gr.update(visible=False)
59
+
60
+ code = code_txt.lstrip().rstrip()
61
+ tokenized = tokenizer.batch_encode_plus(
62
+ [code],
63
+ max_length = 150,
64
+ pad_to_max_length = True,
65
+ truncation = True
66
+ )
67
+ pred = pragformer_reduction(torch.tensor(tokenized['input_ids']), torch.tensor(tokenized['attention_mask']))
68
+
69
+ y_hat = torch.argmax(pred).item()
70
+ if y_hat == 0:
71
+ return gr.update(visible=False)
72
+ else:
73
+ return gr.update(value=f"Confidence: {torch.nn.Softmax(dim=1)(pred).squeeze()[y_hat].item()}", visible=True)
74
 
75
 
76
  # Define GUI
 
107
  gr.Markdown("## Input")
108
  with gr.Row():
109
  with gr.Column():
110
+ drop = gr.Dropdown(list(data.keys()), label="Random Code Snippet", value="LLNL/AutoParBench/benchmarks/Autopar/NPB3.0-omp-c/BT/bt/129")
111
  sample_btn = gr.Button("Sample")
112
 
113
  pragma = gr.Textbox(label="Pragma")