nugentc committed on
Commit 0d07fde · 1 Parent(s): fd58ec5
Files changed (1)
  1. app.py +3 -5
app.py CHANGED
@@ -23,17 +23,15 @@ import gradio as gr
 def chat(message, history=[]):
     new_user_input_ids = tokenizer.encode(message+tokenizer.eos_token, return_tensors='pt')
     if len(history) > 0:
-        last_set_of_ids = history[len(history)-1][3]
+        last_set_of_ids = history[len(history)-1][2]
         bot_input_ids = torch.cat([last_set_of_ids, new_user_input_ids], dim=-1)
     else:
         print("HERE WE GO! ", new_user_input_ids)
         bot_input_ids = new_user_input_ids
-    chat_history_ids = model.generate(bot_input_ids, max_length=500, pad_token_id=tokenizer.eos_token_id)
+    chat_history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
     response_ids = chat_history_ids[:, bot_input_ids.shape[-1]:][0]
     response = tokenizer.decode(response_ids, skip_special_tokens=True)
-    # response = tokenizer.decode(response_ids).replace("<|endoftext|>", "")
-    bot_input_ids = torch.cat([bot_input_ids, response_ids], dim=-1)
-    history.push((message, response, bot_input_ids))
+    history.push((message, response, chat_history_id))
     return response, history, feedback(message)
 
 
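
For context, a minimal self-contained sketch of how the chat handler reads after this commit. It is not the repository's exact app.py: the DialoGPT checkpoint, the feedback() stub, the dropped debug print, and the use of list.append() with chat_history_ids (Python lists have no push(), and generate() returns chat_history_ids) are assumptions made only so the snippet runs end to end.

    import torch
    from transformers import AutoTokenizer, AutoModelForCausalLM

    # Assumption: app.py loads a DialoGPT-style causal LM; any conversational checkpoint works here.
    tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
    model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

    def feedback(message):
        # Placeholder for the feedback() helper referenced by the real app.
        return ""

    def chat(message, history=[]):
        # Encode the new user turn and terminate it with the EOS token.
        new_user_input_ids = tokenizer.encode(message + tokenizer.eos_token, return_tensors='pt')
        if len(history) > 0:
            # Each history entry is (message, response, chat_history_ids); index 2 holds
            # the full token tensor carried over from the previous turn.
            last_set_of_ids = history[-1][2]
            bot_input_ids = torch.cat([last_set_of_ids, new_user_input_ids], dim=-1)
        else:
            bot_input_ids = new_user_input_ids
        # Generate up to 1000 tokens of combined context plus reply.
        chat_history_ids = model.generate(bot_input_ids, max_length=1000,
                                          pad_token_id=tokenizer.eos_token_id)
        # Keep only the newly generated tokens and decode them.
        response_ids = chat_history_ids[:, bot_input_ids.shape[-1]:][0]
        response = tokenizer.decode(response_ids, skip_special_tokens=True)
        # Store the full generated tensor so the next turn can extend it.
        history.append((message, response, chat_history_ids))
        return response, history, feedback(message)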