Gokulram2710 committed on
Commit
9dd4aba
·
verified ·
1 Parent(s): 675cd9d

Update handler.py

Browse files
Files changed (1) hide show
  1. handler.py +19 -11
handler.py CHANGED
@@ -8,19 +8,27 @@ class CustomModelHandler:
8
  self.load_model()
9
 
10
    def load_model(self):
        """Load the tokenizer and causal-LM weights named by self.model_name_or_path.

        Side effects: sets self.tokenizer and self.model, and moves the model
        onto self.device. Called from __init__, so loading happens at
        construction time.
        """
        # trust_remote_code=True is required for repos that ship custom model code
        # (e.g. Phi-3 vision) — NOTE(review): this executes code from the hub repo.
        self.tokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path, trust_remote_code=True)
        self.model = AutoModelForCausalLM.from_pretrained(
            self.model_name_or_path,
            trust_remote_code=True,
            torch_dtype="auto"  # let transformers pick the dtype stored in the checkpoint
        )
        self.model.to(self.device)
 
 
 
 
18
 
19
    def predict(self, inputs):
        """Tokenize *inputs*, run generation, and return the decoded text.

        Args:
            inputs: raw text (or batch of text) accepted by the tokenizer.

        Returns:
            list[str]: one decoded string per generated sequence
            (special tokens stripped).
        """
        # NOTE: the parameter is rebound here — after this line `inputs` holds
        # the tokenized tensors on self.device, not the original text.
        inputs = self.tokenizer(inputs, return_tensors="pt").to(self.device)
        outputs = self.model.generate(**inputs)
        predictions = self.tokenizer.batch_decode(outputs, skip_special_tokens=True)
        return predictions
 
 
 
 
24
 
25
# Initialize the handler with the model path.
# Module-level instantiation: model weights are loaded at import time
# (the constructor calls self.load_model()).
handler = CustomModelHandler("microsoft/Phi-3-vision-128k-instruct")
 
8
  self.load_model()
9
 
10
  def load_model(self):
11
+ try:
12
+ self.tokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path, trust_remote_code=True)
13
+ self.model = AutoModelForCausalLM.from_pretrained(
14
+ self.model_name_or_path,
15
+ trust_remote_code=True,
16
+ torch_dtype="auto"
17
+ )
18
+ self.model.to(self.device)
19
+ except Exception as e:
20
+ print(f"An error occurred while loading the model: {e}")
21
+ raise
22
 
23
  def predict(self, inputs):
24
+ try:
25
+ inputs = self.tokenizer(inputs, return_tensors="pt").to(self.device)
26
+ outputs = self.model.generate(**inputs)
27
+ predictions = self.tokenizer.batch_decode(outputs, skip_special_tokens=True)
28
+ return predictions
29
+ except Exception as e:
30
+ print(f"An error occurred during prediction: {e}")
31
+ raise
32
 
33
# Initialize the handler with the model path.
# Module-level instantiation: model weights are loaded at import time
# (the constructor calls self.load_model()).
handler = CustomModelHandler("microsoft/Phi-3-vision-128k-instruct")