miracFence committed
Commit e3a0c35 · verified · 1 Parent(s): a295415

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -47,9 +47,9 @@ def generate(
     conversation.append({"role": "user", "content": message})

     input_ids = tokenizer.apply_chat_template(conversation, add_generation_prompt=True, return_tensors="pt")
-    if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
-        input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
-        gr.Warning(f"Trimmed input from conversation as it was longer than {MAX_INPUT_TOKEN_LENGTH} tokens.")
+    if input_ids.shape[1] > 4096:
+        input_ids = input_ids[:, -4096:]
+        gr.Warning(f"Trimmed input from conversation as it was longer than {4096} tokens.")
     input_ids = input_ids.to(model.device)

     streamer = TextIteratorStreamer(tokenizer, timeout=20.0, skip_prompt=True, skip_special_tokens=True)
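
For context, this change inlines the 4096-token limit that MAX_INPUT_TOKEN_LENGTH previously supplied when trimming the prompt. Below is a minimal, self-contained sketch of that trimming pattern; the trim_prompt helper and the dummy prompt tensor are illustrative stand-ins, not part of app.py.

import torch

# Assumed limit: the literal this commit inlines in place of MAX_INPUT_TOKEN_LENGTH.
MAX_INPUT_TOKEN_LENGTH = 4096

def trim_prompt(input_ids: torch.Tensor, limit: int = MAX_INPUT_TOKEN_LENGTH) -> torch.Tensor:
    """Keep only the most recent `limit` tokens of a (1, seq_len) prompt tensor."""
    if input_ids.shape[1] > limit:
        # Drop the oldest tokens so the newest conversation turns survive.
        input_ids = input_ids[:, -limit:]
    return input_ids

# Usage: a dummy prompt of 5000 token ids is cut down to its trailing 4096.
prompt = torch.arange(5000).unsqueeze(0)
print(trim_prompt(prompt).shape)  # torch.Size([1, 4096])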