ejschwartz commited on
Commit
e3cd87e
·
verified ·
1 Parent(s): e9bf45f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -8
app.py CHANGED
@@ -9,6 +9,7 @@ tokenizer = AutoTokenizer.from_pretrained(model)
9
 
10
  # predict summary
11
  def predict_summary(tokenizer,code):
 
12
  input = tokenizer('summarize: '+code,return_tensors='pt',max_length=max_input_length,truncation=True)
13
  output = model.generate(**input,max_new_tokens=256)[0]
14
  return tokenizer.decode(output,skip_special_tokens=True)
@@ -16,6 +17,7 @@ def predict_summary(tokenizer,code):
16
 
17
  # predict identifier (func name)
18
  def predict_identifier(tokenizer,code):
 
19
  '''
20
  code should be like: "unsigned __int8 *__cdecl <func>(int *<var_0>,...){ return <func_1>(1);}"
21
  '''
@@ -23,14 +25,6 @@ def predict_identifier(tokenizer,code):
23
  output = model.generate(**input)[0]
24
  return tokenizer.decode(output)
25
 
26
-
27
- # Define the inference function
28
- def generate_text(prompt):
29
- inputs = tokenizer(prompt, return_tensors="pt")
30
- outputs = model.generate(**inputs, max_length=100)
31
- generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
32
- return generated_text
33
-
34
  # Create the Gradio interface
35
  iface = gr.Interface(
36
  fn=predict_identifier,
 
9
 
10
# predict summary
def predict_summary(tokenizer, code):
    """Generate a natural-language summary of *code* with the module-level model.

    Args:
        tokenizer: HuggingFace tokenizer used to encode the prompt and decode
            the generated ids.
        code: Source-code string to summarize.

    Returns:
        The decoded summary string, with special tokens stripped.
    """
    # `global` kept from the original; it is only required for assignment,
    # reading the module-level `model` would work without it.
    global model
    # renamed from `input` to avoid shadowing the builtin
    inputs = tokenizer('summarize: ' + code, return_tensors='pt',
                       max_length=max_input_length, truncation=True)
    output = model.generate(**inputs, max_new_tokens=256)[0]
    return tokenizer.decode(output, skip_special_tokens=True)
 
17
 
18
  # predict identifier (func name)
19
  def predict_identifier(tokenizer,code):
20
+ global model
21
  '''
22
  code should be like: "unsigned __int8 *__cdecl <func>(int *<var_0>,...){ return <func_1>(1);}"
23
  '''
 
25
  output = model.generate(**input)[0]
26
  return tokenizer.decode(output)
27
 
 
 
 
 
 
 
 
 
28
  # Create the Gradio interface
29
  iface = gr.Interface(
30
  fn=predict_identifier,