# Use the official lightweight Python image.
# https://hub.docker.com/_/python
FROM python:3.10-slim

# Copy local code to the container image.
ENV APP_HOME /app
WORKDIR $APP_HOME
# COPY . ./

# Install system packages needed by OpenCV (libGL, glib) and git for cloning the repo.
RUN apt-get update && apt-get install -y \
    libgl1-mesa-glx \
    libglib2.0-dev \
    git

# Install production dependencies.
RUN pip install --upgrade pip
RUN git clone https://github.com/LiteraturePro/MODNet.git
RUN cp -r MODNet/* ./
RUN pip install -r requirements.txt

# Install the CPU-only PyTorch build.
#RUN pip install torch==1.7.1+cpu torchvision==0.8.2+cpu torchaudio==0.7.2 -f https://download.pytorch.org/whl/torch_stable.html
RUN pip install torch==1.13.0+cpu torchvision==0.14.0+cpu torchaudio==0.13.0 --extra-index-url https://download.pytorch.org/whl/cpu

# Run the web service on container startup. The gunicorn variants below are
# kept commented out for reference: they use one worker process and 8 threads.
# For environments with multiple CPU cores, increase the number of workers
# to be equal to the cores available.
# CMD exec gunicorn --bind 0.0.0.0:7860 --workers 1 --threads 8 --timeout 0 app:app
# CMD exec gunicorn --bind 0.0.0.0:$PORT --workers 1 --threads 8 --timeout 0 app:app
# The active entrypoint starts the Streamlit app instead.
CMD ["streamlit", "run", "streamlit_app.py", "--server.port", "7860"]
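
# A minimal sketch of building and running this image locally (the image tag
# "modnet-streamlit" is an assumption; the host port simply mirrors the
# --server.port value used in the CMD above):
#
#   docker build -t modnet-streamlit .
#   docker run --rm -p 7860:7860 modnet-streamlit
#
# The app should then be reachable at http://localhost:7860.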