jhj0517 committed
Commit 97cdbae · 1 Parent(s): 65a3710

change annotation: move @spaces.GPU below @staticmethod (dropping the explicit duration=120) and log when get_device() falls back to CPU

modules/whisper/faster_whisper_inference.py CHANGED
@@ -154,8 +154,8 @@ class FasterWhisperInference(WhisperBase):
             return ['float32', 'int8_float16', 'float16', 'int8', 'int8_float32']
         return ['int16', 'float32', 'int8', 'int8_float32']
 
-    @spaces.GPU(duration=120)
     @staticmethod
+    @spaces.GPU
     def get_device():
         print("GET DEVICE:")
         if torch.cuda.is_available():
@@ -164,6 +164,7 @@ class FasterWhisperInference(WhisperBase):
         elif torch.backends.mps.is_available():
             return "auto"
         else:
+            print("GET DEVICE: device is cpu")
             return "cpu"
 
     @staticmethod
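
For reference, Python applies stacked decorators bottom-up, so after this commit spaces.GPU decorates the plain function before staticmethod wraps it (previously it was applied to the already-created staticmethod object and requested an explicit duration=120). The sketch below shows the resulting decorator order; it is an illustrative reconstruction, not the repository file: spaces is assumed to be the Hugging Face Spaces (ZeroGPU) helper, the class is reduced to a stub without the real WhisperBase base class, and the CUDA branch is left elided as in the diff.

import torch
import spaces  # assumption: Hugging Face Spaces (ZeroGPU) helper available in this Space


class FasterWhisperInference:  # stub for illustration; the real class inherits from WhisperBase
    @staticmethod
    @spaces.GPU  # applied first (bottom-up), so it wraps the undecorated function
    def get_device():
        print("GET DEVICE:")
        if torch.cuda.is_available():
            ...  # CUDA branch elided in the commit diff
        elif torch.backends.mps.is_available():
            return "auto"
        else:
            print("GET DEVICE: device is cpu")
            return "cpu"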