Tonic committed on
Commit
d8e35bf
·
unverified ·
1 Parent(s): 7bd644e

fixes pad token id issue

Browse files
Files changed (1) hide show
  1. app.py +5 -1
app.py CHANGED
@@ -106,7 +106,11 @@ model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloa
106
 
107
  # Set generation config
108
  model.generation_config = GenerationConfig.from_pretrained(model_name)
109
- model.generation_config.pad_token_id = model.generation_config.eos_token_id
 
 
 
 
110
  model.generation_config.do_sample = True
111
  model.generation_config.temperature = 0.6
112
  model.generation_config.top_p = 0.95
 
106
 
107
  # Set generation config
108
  model.generation_config = GenerationConfig.from_pretrained(model_name)
109
+ # Ensure pad_token_id is an integer, not a list
110
+ if isinstance(model.generation_config.eos_token_id, list):
111
+ model.generation_config.pad_token_id = model.generation_config.eos_token_id[0]
112
+ else:
113
+ model.generation_config.pad_token_id = model.generation_config.eos_token_id
114
  model.generation_config.do_sample = True
115
  model.generation_config.temperature = 0.6
116
  model.generation_config.top_p = 0.95