# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("text-generation", model="THUDM/LongWriter-llama3.1-8b")
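# Example call (a minimal sketch; the prompt and generation settings below are
# illustrative assumptions, not part of the original snippet):
result = pipe("Write a short story about a lighthouse keeper.", max_new_tokens=200)
print(result[0]["generated_text"])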

# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("THUDM/LongWriter-llama3.1-8b")
model = AutoModelForCausalLM.from_pretrained("THUDM/LongWriter-llama3.1-8b")
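
# Example generation with the directly loaded model (a minimal sketch; the
# prompt, max_new_tokens, and sampling settings are illustrative assumptions;
# an instruction-tuned model like this may expect its chat prompt format, so
# check the model card for the recommended template):
prompt = "Write a short travel guide for Beijing."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=512, do_sample=True, temperature=0.7)
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))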