from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
model_name = "EQUES/TinyDeepSeek-1.5B"
# Load the tokenizer
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Load the model (fp16 to fit the free plan's memory limit; device_map="auto" places it on GPU when available, otherwise CPU)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
# Test input
input_text = "Explain large language models in simple terms."
# Move the inputs to the same device the model was placed on by device_map="auto"
input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(model.device)
# Run inference
output = model.generate(input_ids, max_length=100)
generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
print(generated_text)
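
# A reusable helper, as a minimal sketch: the function name and generation
# settings below are illustrative assumptions, not part of the original script.
# Unlike max_length above (which counts prompt tokens too), max_new_tokens caps
# only the generated tokens; torch.inference_mode() skips gradient tracking.
def generate_reply(prompt: str, max_new_tokens: int = 100) -> str:
    # Passing **inputs forwards the attention_mask along with the input_ids
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.inference_mode():
        output_ids = model.generate(**inputs, max_new_tokens=max_new_tokens)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

# Example usage:
# print(generate_reply("What is a tokenizer?"))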