fffiloni committed
Commit e7ce4db · verified · 1 Parent(s): 8ad240c

Update inference/infer.py

Files changed (1)
  1. inference/infer.py +6 -1
inference/infer.py CHANGED
@@ -69,7 +69,12 @@ os.makedirs(stage2_output_dir, exist_ok=True)
 
 # load tokenizer and model
 #device = torch.device(f"cuda:{cuda_idx}" if torch.cuda.is_available() else "cpu")
-device = "cuda"
+
+# Check if CUDA is available
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+# Now you can use `device` to move your tensors or models to the GPU (if available)
+print(f"Using device: {device}")
+
 mmtokenizer = _MMSentencePieceTokenizer("./mm_tokenizer_v0.2_hf/tokenizer.model")
 model = AutoModelForCausalLM.from_pretrained(
     stage1_model,
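
The change replaces the hard-coded `device = "cuda"`, which fails on CPU-only machines, with a runtime check. For context, a minimal sketch of how a device selected this way is typically consumed downstream; the module and tensor below are placeholders, not code from infer.py:

import torch

# Select GPU when available, otherwise fall back to CPU (same pattern as the commit)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(f"Using device: {device}")

# Placeholder module and input standing in for the real model and prompt tensors
model = torch.nn.Linear(4, 2).to(device)
x = torch.randn(1, 4, device=device)

with torch.no_grad():
    y = model(x)
print(y.device)  # cuda:0 if a GPU was found, otherwise cpu

Because the module and its inputs are both moved with the same `device` object, the script runs unchanged on GPU and CPU hosts.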