# long-writer / app.py
# Option 1: use a pipeline as a high-level helper. This and the direct load
# below are alternative ways of loading the same model; a real app needs only one.
from transformers import pipeline

pipe = pipeline("text-generation", model="THUDM/LongWriter-llama3.1-8b")
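# A minimal pipeline call, added for illustration; the prompt and the
# max_new_tokens value are assumptions, not part of the original snippet.
result = pipe(
    "Write a short story about a lighthouse keeper.",
    max_new_tokens=512,
)
print(result[0]["generated_text"])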
# Option 2: load the tokenizer and model directly for finer control
# over tokenization and generation settings.
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("THUDM/LongWriter-llama3.1-8b")
model = AutoModelForCausalLM.from_pretrained("THUDM/LongWriter-llama3.1-8b")
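# A minimal generation sketch using the directly loaded model, added for
# illustration. The prompt, token budget, and sampling settings are assumptions;
# the model's own chat template may be preferable for instruction-style prompts.
import torch

prompt = "Write a 500-word essay on the history of the lighthouse."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

with torch.no_grad():
    output_ids = model.generate(
        **inputs,
        max_new_tokens=1024,  # illustrative cap; LongWriter targets much longer outputs
        do_sample=True,
        temperature=0.7,
    )

# Decode only the tokens generated after the prompt.
new_tokens = output_ids[0][inputs["input_ids"].shape[1]:]
print(tokenizer.decode(new_tokens, skip_special_tokens=True))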