Spaces: Running on Zero
skip_prompt
app.py CHANGED
@@ -7,7 +7,7 @@ model = AutoModelForCausalLM.from_pretrained("google/gemma-7b",token=token)
 streamer = TextStreamer(tokenizer,skip_prompt=True)
 
 
-def generate(inputs):
+def generate(inputs,history):
     inputs = tokenizer([inputs], return_tensors="pt")
     yield model.generate(**inputs, streamer=streamer)
 
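
The only change in this commit is the extra history parameter on generate, which matches the (message, history) signature that gr.ChatInterface passes to its callback. The rest of app.py is not visible in this diff, so the following is only a minimal sketch of how such a function might be wired up: the token handling, the use of gr.ChatInterface and TextIteratorStreamer, and the max_new_tokens value are assumptions rather than code from this Space. The diff's own generate uses TextStreamer, which prints tokens to stdout; the sketch swaps in TextIteratorStreamer so partial text can be yielded back to the UI.

# Minimal sketch only -- assumed wiring, not this Space's actual app.py.
from threading import Thread

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

token = None  # assumption: the real Space reads a Hugging Face access token from a secret
tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b", token=token)
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b", token=token)

# On a ZeroGPU Space this function would typically also carry the @spaces.GPU decorator.
def generate(inputs, history):
    # history is supplied by gr.ChatInterface; it is unused here, as in the diff above.
    model_inputs = tokenizer([inputs], return_tensors="pt")
    # TextIteratorStreamer (unlike TextStreamer, which prints to stdout) exposes the
    # generated text as an iterator the Gradio callback can yield from.
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    Thread(
        target=model.generate,
        kwargs=dict(**model_inputs, streamer=streamer, max_new_tokens=256),
    ).start()
    partial = ""
    for new_text in streamer:
        partial += new_text
        yield partial  # ChatInterface re-renders the reply with each yielded string

demo = gr.ChatInterface(generate)
demo.launch()

Yielding the accumulated string (rather than each fragment on its own) is what lets the chat reply grow in place as generation proceeds.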