# Exceedea / app.py: Flask service for Hebrew speech transcription with openai/whisper-base
import os

# Set the Hugging Face cache directory before importing transformers,
# since the hub library resolves HF_HOME when it is first imported.
os.environ["HF_HOME"] = "/tmp/hf_cache"

from flask import Flask, request, jsonify
import requests
import torch
import librosa
from transformers import WhisperProcessor, WhisperForConditionalGeneration
app = Flask(__name__)

# Load the Whisper base model and its processor once at startup.
model_id = "openai/whisper-base"
processor = WhisperProcessor.from_pretrained(model_id)
model = WhisperForConditionalGeneration.from_pretrained(model_id)

# Run on GPU when available, otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

# Force decoding to Hebrew transcription (rather than language autodetection or translation).
forced_decoder_ids = processor.get_decoder_prompt_ids(language="he", task="transcribe")
# rest of the code remains unchanged...
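
# --- Hypothetical sketch (not part of the original file) ---
# The elided remainder presumably exposes an HTTP endpoint that runs the model.
# A minimal sketch follows, assuming a /transcribe route that accepts a JSON
# body with an audio URL; the route name, request schema, temporary file path,
# and port are assumptions, not the author's original code.

@app.route("/transcribe", methods=["POST"])
def transcribe():
    data = request.get_json(force=True)
    audio_url = data.get("url")
    if not audio_url:
        return jsonify({"error": "missing 'url' field"}), 400

    # Download the audio to a temporary file (path chosen here for illustration).
    local_path = "/tmp/input_audio"
    resp = requests.get(audio_url, timeout=60)
    resp.raise_for_status()
    with open(local_path, "wb") as f:
        f.write(resp.content)

    # Load and resample to 16 kHz, the sampling rate Whisper expects.
    speech, _ = librosa.load(local_path, sr=16000)

    # Convert to log-mel input features and generate a Hebrew transcription.
    inputs = processor(speech, sampling_rate=16000, return_tensors="pt")
    input_features = inputs.input_features.to(device)
    with torch.no_grad():
        predicted_ids = model.generate(input_features, forced_decoder_ids=forced_decoder_ids)
    text = processor.batch_decode(predicted_ids, skip_special_tokens=True)[0]

    return jsonify({"transcription": text})


if __name__ == "__main__":
    # Hugging Face Spaces conventionally expose port 7860 (an assumption for this deployment).
    app.run(host="0.0.0.0", port=7860)

# Example request (hypothetical):
#   curl -X POST http://localhost:7860/transcribe \
#        -H "Content-Type: application/json" \
#        -d '{"url": "https://example.com/audio.wav"}'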