MohamedRashad committed
Commit 118bfa5 · 1 Parent(s): 008db80

Update inference decorator to use GPU() from spaces for improved performance

Files changed (1): app.py (+1 -1)
app.py CHANGED
@@ -284,7 +284,7 @@ processing_results = {
     'markdown_content': None,
     'raw_output': None,
 }
-@spaces.gpu
+@spaces.GPU()
 def inference(image: Image.Image, prompt: str, max_new_tokens: int = 24000) -> str:
     """Run inference on an image with the given prompt"""
     try:
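
For reference, a minimal sketch of how the corrected decorator is typically used in a ZeroGPU Space. Assuming a recent version of the spaces package, the lowercase spaces.gpu attribute is not provided (it would raise an AttributeError at import time), while spaces.GPU / spaces.GPU() is the documented form that registers the function for on-demand GPU allocation. The model choice and function name below are illustrative, not taken from app.py:

import spaces
from transformers import pipeline

# Load the model at import time (runs on CPU in a ZeroGPU Space);
# only the decorated function is scheduled onto a GPU when it is called.
pipe = pipeline("text-generation", model="gpt2")  # illustrative model, not the one in app.py

@spaces.GPU()  # bare @spaces.GPU also works; a duration= argument can be passed if calls run long
def generate(prompt: str, max_new_tokens: int = 64) -> str:
    """Run generation on the GPU allocated for this call."""
    return pipe(prompt, max_new_tokens=max_new_tokens)[0]["generated_text"]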