pp542-0965 committed on
Commit 202b450 · 1 Parent(s): c52a50c

remove torch

Files changed (1):
app.py +1 -4
app.py CHANGED
@@ -1,7 +1,5 @@
 import re
 
-import torch
-
 import gradio as gr
 
 from peft import PeftModel
@@ -86,8 +84,7 @@ def response(user_schemas, user_question):
         return_dict=True,
         return_tensors="pt")
 
-    with torch.inference_mode():
-        outputs = model.generate(**inputs, max_new_tokens=1024)
+    outputs = model.generate(**inputs, max_new_tokens=1024)
 
     outputs = tokenizer.batch_decode(outputs)
     output = outputs[0].split("<|im_start|>assistant")[-1]
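
For context, here is a rough sketch of how the generation step in response() might read after this commit. Only the generate, batch_decode, and split lines come from the hunk; the model/tokenizer setup, the message layout, and the apply_chat_template call are assumptions, since prompt construction is not shown in the diff.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed setup (placeholders; the repo's actual base model and PEFT adapter
# names are not visible in this hunk):
# tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL_ID)
# model = AutoModelForCausalLM.from_pretrained(BASE_MODEL_ID)

def response(user_schemas, user_question):
    # Hypothetical prompt construction; the real app.py code is not part of
    # the diff.
    messages = [
        {"role": "user", "content": f"{user_schemas}\n\n{user_question}"},
    ]
    inputs = tokenizer.apply_chat_template(
        messages,
        add_generation_prompt=True,
        return_dict=True,
        return_tensors="pt")

    # After this commit, generation runs without an explicit
    # torch.inference_mode() block; transformers' generate() disables gradient
    # tracking internally, so the torch import is no longer needed in app.py.
    outputs = model.generate(**inputs, max_new_tokens=1024)

    outputs = tokenizer.batch_decode(outputs)
    output = outputs[0].split("<|im_start|>assistant")[-1]
    return output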