import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
tokenizer = AutoTokenizer.from_pretrained("georgesung/llama2_7b_chat_uncensored")
model = AutoModelForCausalLM.from_pretrained("georgesung/llama2_7b_chat_uncensored").to(device)
model.eval()

def get_response(prompt, max_new_tokens=50):
    # Tokenize the prompt and move the input tensors to the same device as the model
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    # Greedy decoding; the original near-zero temperature with sampling was effectively deterministic
    outputs = model.generate(**inputs, max_new_tokens=max_new_tokens, do_sample=False)
    # Decode the first (and only) sequence in the batch, dropping special tokens
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response
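
# Minimal usage sketch (not part of the original file): the prompt below is a
# hypothetical placeholder to show how get_response is called.
if __name__ == "__main__":
    print(get_response("What is the capital of France?", max_new_tokens=30))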