Pipatpong committed
Commit a465184 · 1 Parent(s): 739d665

modified: app.py

Files changed (1)
  app.py  +7 -2
app.py CHANGED
@@ -13,8 +13,12 @@ model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True,
 def generate(text, max_length, num_return_sequences=1):
     inputs = tokenizer.encode(text, padding=False, add_special_tokens=False, return_tensors="pt")
     outputs = model.generate(inputs, max_length=max_length, num_return_sequences=num_return_sequences)
-    gen_text = "Assignment : " + tokenizer.decode(outputs[0]).split("#")[0] if "#" else "Assignment : " + tokenizer.decode(outputs[0])
-    return gen_text
+    gen_text = "Assignment : " + tokenizer.decode(outputs[0])
+    if gen_text.count("#") > 2:
+        split_text = gen_text.split("#", 2)
+        return split_text[0] + "#" + split_text[1]
+    else:
+        return gen_text
 
 
 def extract_functions(text):
@@ -58,6 +62,7 @@ with demo:
     examples = [
         ["generate a python for sum number"],
         ["generate a python function to find max min element of list"],
+        ["generate a python function to find minimum of two numbers with test case"],
     ]
 
     gr.Examples(examples=examples, inputs=inputs, cache_examples=False)
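
Note on the change to generate(): in the removed one-liner, the conditional if "#" tests a non-empty string literal and is therefore always true, so the old code always kept only the text before the first "#". The new branch keeps the prompt, the first "#" comment, and whatever code follows it, and only cuts the output off at the second "#". A minimal standalone sketch of that rule (the helper name and the sample string are hypothetical; only the split logic mirrors app.py):

    def truncate_after_first_comment(gen_text):
        # Drop everything from the second "#" onwards; otherwise return unchanged.
        if gen_text.count("#") > 2:
            split_text = gen_text.split("#", 2)
            return split_text[0] + "#" + split_text[1]
        return gen_text

    sample = (
        "Assignment : generate a python for sum number\n"
        "# sum two numbers\n"
        "def add(a, b):\n"
        "    return a + b\n"
        "# extra generated text\n"
        "# more trailing noise\n"
    )
    print(truncate_after_first_comment(sample))
    # Keeps the prompt, the first comment, and the function;
    # the two trailing "#" lines are removed.

With two or fewer "#" characters in the decoded text, nothing is trimmed and the full string is returned.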