Pratham Bhat committed on
Commit
03e3488
·
1 Parent(s): b498d8b

Set cache dir

Browse files
Files changed (1) hide show
  1. main.py +1 -1
main.py CHANGED
@@ -67,7 +67,7 @@ def generate(item: Item):
67
  device = "cuda" if torch.cuda.is_available() else "cpu"
68
 
69
  model_path = "ibm-granite/granite-34b-code-instruct-8k"
70
- tokenizer = AutoTokenizer.from_pretrained(model_path)
71
  # drop device_map if running on CPU
72
  model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
73
  model.eval()
 
67
  device = "cuda" if torch.cuda.is_available() else "cpu"
68
 
69
  model_path = "ibm-granite/granite-34b-code-instruct-8k"
70
+ tokenizer = AutoTokenizer.from_pretrained(model_path, cache_dir="/code/huggingface/transformers")
71
  # drop device_map if running on CPU
72
  model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
73
  model.eval()