# NOTE(review): the two lines below were non-Dockerfile page residue from the
# Hugging Face Spaces UI ("Spaces:" / "Sleeping" status badges), preserved here
# as comments so the file remains a valid Dockerfile.
# Spaces: Sleeping
# syntax=docker/dockerfile:1
# Use a stable, explicitly tagged Python base image
FROM python:3.9-slim

# PYTHONUNBUFFERED=1: flush stdout/stderr immediately so container logs stream live.
# HF_HOME / HUGGINGFACE_HUB_CACHE: point the Hugging Face cache inside the image
# so models pre-downloaded at build time are found again at runtime.
ENV PYTHONUNBUFFERED=1 \
    LANG=C.UTF-8 \
    HF_HOME="/app/huggingface_cache" \
    HUGGINGFACE_HUB_CACHE="/app/huggingface_cache"

WORKDIR /app

# Copy only the dependency manifest first so the pip layer below stays cached
# until requirements.txt itself changes.
COPY requirements.txt .

# Install dependencies into an isolated virtual environment.
RUN python -m venv /app/venv && \
    /app/venv/bin/pip install --no-cache-dir --upgrade pip && \
    /app/venv/bin/pip install --no-cache-dir -r requirements.txt

# Application code is copied after the dependency layer so code-only edits
# do not invalidate the (slow) pip install above.
COPY main.py .

# Ensure the model cache directory exists before pre-downloading.
RUN mkdir -p "$HF_HOME"

# Pre-download models at build time (errors are logged but do not fail the build).
# SECURITY: the Hugging Face token is supplied via a BuildKit secret mount, NOT
# via ARG/ENV — ARG values appear in `docker history` and an ENV would persist
# the token into every container started from this image.
# Build with:  docker build --secret id=HF_TOKEN,env=HF_TOKEN .
# (Hugging Face Spaces exposes repo secrets to RUN through this same mechanism.)
# huggingface_hub reads the HF_TOKEN environment variable automatically, which
# replaces the deprecated `use_auth_token` pipeline argument.
RUN --mount=type=secret,id=HF_TOKEN \
    HF_TOKEN="$(cat /run/secrets/HF_TOKEN 2>/dev/null || true)" \
    /app/venv/bin/python -c "from transformers import pipeline; \
    pipeline('sentiment-analysis', model='tabularisai/multilingual-sentiment-analysis')" \
    || echo 'Failed to download model 1'
RUN --mount=type=secret,id=HF_TOKEN \
    HF_TOKEN="$(cat /run/secrets/HF_TOKEN 2>/dev/null || true)" \
    /app/venv/bin/python -c "from transformers import pipeline; \
    pipeline('sentiment-analysis', model='siebert/sentiment-roberta-large-english')" \
    || echo 'Failed to download model 2'

# Run as a non-root user (UID 1000 is the Hugging Face Spaces convention).
# The chown gives the runtime user write access to the model cache under /app.
RUN useradd --create-home --uid 1000 appuser && \
    chown -R appuser:appuser /app
USER appuser

# Documentation only — the FastAPI app listens on 7860 (Spaces' expected port).
EXPOSE 7860

# Exec-form CMD so uvicorn is PID 1 and receives SIGTERM from `docker stop`.
CMD ["/app/venv/bin/uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]