willwade committed
Commit 8173918 · 1 Parent(s): b5ebd65

Fix truncation warnings in text generation

Files changed (1): utils.py +3 -4
utils.py CHANGED
@@ -274,9 +274,7 @@ class SuggestionGenerator:
         try:
             test_prompt = "I am Will. My son Billy asked about football. I respond:"
             print(f"Testing model with prompt: {test_prompt}")
-            response = self.generator(
-                test_prompt, max_new_tokens=30, do_sample=True, truncation=True
-            )
+            response = self.generator(test_prompt, max_new_tokens=30, do_sample=True)
             result = response[0]["generated_text"][len(test_prompt) :]
             print(f"Test response: {result}")
             return f"Model test successful: {result}"
@@ -405,7 +403,8 @@ My response to {name}:"""
             do_sample=True,
             top_p=0.92,
             top_k=50,
-            truncation=True,
+            # Only use truncation if we're providing a max_length
+            truncation=False,
         )
         # Extract only the generated part, not the prompt
         result = response[0]["generated_text"][len(prompt) :]
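
For context, a minimal sketch (not taken from the repository) of the two call styles this change distinguishes. The model name and second call are illustrative placeholders; the exact warning text depends on the installed transformers version.

from transformers import pipeline

# Hypothetical stand-in for self.generator, which the repo builds elsewhere.
generator = pipeline("text-generation", model="distilgpt2")

prompt = "I am Will. My son Billy asked about football. I respond:"

# After the fix: bound only the continuation with max_new_tokens and leave
# truncation off, avoiding the tokenizer warning about truncation being
# requested without a maximum length.
response = generator(prompt, max_new_tokens=30, do_sample=True)
print(response[0]["generated_text"][len(prompt):])

# If the input itself must be clipped, pair truncation=True with an explicit
# max_length, as the in-code comment suggests.
response = generator(prompt, truncation=True, max_length=64, do_sample=True)
print(response[0]["generated_text"][len(prompt):])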