Update README.md
Browse files
README.md
CHANGED
@@ -56,8 +56,8 @@ Below is a sample code snippet demonstrating how to use LogiLlama with the Huggi
56   from transformers import AutoModelForCausalLM, AutoTokenizer
57
58   # Load tokenizer and model from our repository
59 - tokenizer = AutoTokenizer.from_pretrained("
60 - model = AutoModelForCausalLM.from_pretrained("
61
62   model.to('cuda')
63   text = "When faced with a complex problem, one must first analyze "
56   from transformers import AutoModelForCausalLM, AutoTokenizer
57
58   # Load tokenizer and model from our repository
59 + tokenizer = AutoTokenizer.from_pretrained("goppa-ai/Goppa-LogiLlama", trust_remote_code=True)
60 + model = AutoModelForCausalLM.from_pretrained("goppa-ai/Goppa-LogiLlama", trust_remote_code=True)
61
62   model.to('cuda')
63   text = "When faced with a complex problem, one must first analyze "