ThomasBlumet committed
Commit 1947635 · 1 Parent(s): 4e9028a

remove inputs_ids in tokenizer.decode

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -28,7 +28,7 @@ def generate_text(input_text,max_new_tokens=512,top_k=50,top_p=0.95,temperature=
         output = model.generate(input_ids, attention_mask=attention_mask, max_new_tokens=max_new_tokens, top_k=top_k, top_p=top_p, temperature=temperature,do_sample=True)
     else:
         output = model.generate(input_ids, attention_mask=attention_mask, max_new_tokens=max_new_tokens, top_k=top_k, top_p=top_p, temperature=temperature,do_sample=True)
-    return tokenizer.decode(output[0][input_ids['input_ids'].shape[1]:], skip_special_tokens=True)
+    return tokenizer.decode(output[0], skip_special_tokens=True)


 time_story = 0
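
With this change, tokenizer.decode(output[0], ...) returns the prompt together with the newly generated tokens, since model.generate keeps the input tokens at the start of the output sequence. The removed expression indexed input_ids as a dict (input_ids['input_ids']), which only works when the tokenizer's full BatchEncoding is kept around rather than the bare tensor passed to model.generate here. Below is a minimal standalone sketch of the alternative that still strips the prompt before decoding; it assumes a causal LM loaded with transformers, and the model name "gpt2" and the prompt string are placeholders, not taken from this repo.

# Hypothetical sketch, not the repo's app.py: decode only the newly
# generated tokens by slicing off the prompt length.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")      # placeholder model
model = AutoModelForCausalLM.from_pretrained("gpt2")   # placeholder model

encoded = tokenizer("Once upon a time", return_tensors="pt")
input_ids = encoded.input_ids          # tensor of shape (1, prompt_len)
attention_mask = encoded.attention_mask

output = model.generate(input_ids, attention_mask=attention_mask,
                        max_new_tokens=512, top_k=50, top_p=0.95,
                        temperature=0.8, do_sample=True)

# output[0] holds prompt + continuation; slice past the prompt length to
# keep only the generated part, then decode.
generated_only = output[0][input_ids.shape[1]:]
print(tokenizer.decode(generated_only, skip_special_tokens=True))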