import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the model and tokenizer once and cache them across Streamlit reruns
@st.cache_resource
def load_model():
    model_name = "deepseek-ai/deepseek-coder-1.3b-instruct"  # Smaller 1.3B instruct variant
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,  # Half precision to reduce memory usage
        device_map="auto",          # Places weights on GPU if available, otherwise CPU
    )
    return model, tokenizer

model, tokenizer = load_model()
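
Once the model and tokenizer are loaded, the app needs a way to take a prompt and return generated text. The original snippet stops before the UI, so the following is only a minimal sketch of how the pieces might be wired together in Streamlit; the widget labels, prompt handling, and generation parameters (max_new_tokens, temperature) are assumptions, not taken from the original app.

st.title("DeepSeek Coder Demo")

prompt = st.text_area("Enter a coding question or instruction:")

if st.button("Generate") and prompt:
    # Tokenize the prompt and move it to the same device as the model
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=256,          # Assumed limit; tune for your Space's hardware
            do_sample=True,
            temperature=0.7,
            pad_token_id=tokenizer.eos_token_id,
        )
    # Decode only the newly generated tokens, skipping the echoed prompt
    response = tokenizer.decode(
        outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True
    )
    st.code(response)

Because load_model() is wrapped in st.cache_resource, the model stays in memory between button clicks; only the generate call runs on each request, which keeps the app responsive on the free CPU tier where loading the weights is the slowest step.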