File size: 457 Bytes
2809bee bdec963 c815242 bdec963 2809bee bdec963 |
1 2 3 4 5 6 7 8 9 10 11 12 13 |
"""Quick smoke test: generate Japanese text with a local HF text-generation pipeline."""
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Hugging Face Hub ID of a Japanese fine-tune of the DeepSeek-R1 Qwen-7B distillation.
MODEL_NAME = "Lightblue/DeepSeek-R1-Distill-Qwen-7B-Japanese"

# Downloads (on first run) and loads the tokenizer and model weights from the Hub.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

prompt = "こんにちは、これはテストです。"
# Use max_new_tokens rather than max_length: max_length=100 counted the prompt's
# tokens toward the limit, silently truncating the continuation (and producing no
# new text at all once the prompt reaches 100 tokens). max_new_tokens bounds only
# the generated portion.
# NOTE(review): this is an R1-style instruct model — applying the tokenizer's chat
# template to the prompt would likely yield better completions; verify desired usage.
result = generator(prompt, max_new_tokens=100)[0]["generated_text"]
print(result)
|