Spaces:
Runtime error
Runtime error
# This is the same Dockerfile as `Helper_Files/Dockerfiles/tldw-nvidia_amd64_Dockerfile`,
# copied here so people see a 'Dockerfile' in the root directory and know what to do.
#
# Usage:
#   docker build -t tldw-nvidia_amd64 .
#   docker run --gpus=all -p 7860:7860 -v tldw_volume:/tldw tldw-nvidia_amd64
#
# If the above command doesn't work and it hangs on start, use the following command:
#
#   sudo docker run -it -p 7860:7860 -v tldw_volume:/tldw tldw-nvidia_amd64 bash
#
# Once in the container, run the following command:
#
#   python summarize.py -gui
#
# And you should be good.
# Use Nvidia CUDA runtime image (Ubuntu 24.04 — ships Python 3.12 via apt).
FROM nvidia/cuda:12.6.1-cudnn-runtime-ubuntu24.04
# Alternative official Python base image (no CUDA):
#FROM python:3.10.15-slim-bookworm

# Build arguments: repository to clone and GPU flavor ("cuda", "amd", or "cpu").
ARG REPO_URL=https://github.com/rmusser01/tldw.git
ARG BRANCH=main
ARG GPU_SUPPORT=cpu

# Install system dependencies in a single layer (update + install combined so
# the apt cache is never stale) and clean the package lists in the same layer
# so they don't bloat the image. Packages sorted alphabetically; the duplicate
# portaudio19-dev entry from the original list has been removed.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        ffmpeg \
        git \
        libsqlite3-dev \
        portaudio19-dev \
        python3 \
        python3-pip \
        python3-pyaudio \
        python3-venv \
    && rm -rf /var/lib/apt/lists/*

# Set the working directory in the container
WORKDIR /tldw

# Clone the repository into the working directory
RUN git clone -b ${BRANCH} ${REPO_URL} .

# Create a virtual environment and put it first on PATH so every pip/python
# below — and the CMD at runtime — resolves to the venv, not the system Python.
RUN python3 -m venv ./venv
ENV PATH="/tldw/venv/bin:$PATH"

# Upgrade pip and install wheel (--no-cache-dir keeps the layer small)
RUN pip install --no-cache-dir --upgrade pip wheel

# CUDA runtime libraries consumed via LD_LIBRARY_PATH below
RUN pip install --no-cache-dir nvidia-cublas-cu12 nvidia-cudnn-cu12

# FIX: the original used `RUN export LD_LIBRARY_PATH=...`, which is a no-op —
# a shell export only lives for the duration of that single RUN layer. ENV is
# the mechanism that persists into the running container. Ubuntu 24.04's
# default Python is 3.12, so the venv site-packages live under python3.12 —
# bump this path if the base image's Python version ever changes.
ENV LD_LIBRARY_PATH="/tldw/venv/lib/python3.12/site-packages/nvidia/cublas/lib:/tldw/venv/lib/python3.12/site-packages/nvidia/cudnn/lib:${LD_LIBRARY_PATH}"

# Install PyTorch per GPU flavor.
# FIX: there is no cu123 wheel index for torch 2.2.2 (available CUDA indexes
# for that release are cu118 and cu121) — the original cu123 URL made the
# build fail; use cu121.
RUN if [ "$GPU_SUPPORT" = "cuda" ]; then \
        pip install --no-cache-dir torch==2.2.2 torchvision==0.17.2 torchaudio==2.2.2 --index-url https://download.pytorch.org/whl/cu121; \
    elif [ "$GPU_SUPPORT" = "amd" ]; then \
        pip install --no-cache-dir torch-directml; \
    else \
        pip install --no-cache-dir torch==2.2.2 torchvision==0.17.2 torchaudio==2.2.2 --index-url https://download.pytorch.org/whl/cpu; \
    fi

# Install the application's remaining Python requirements
RUN pip install --no-cache-dir -r requirements.txt

# Point the app's config at CPU when no GPU support was requested
RUN if [ "$GPU_SUPPORT" = "cpu" ]; then \
        sed -i 's/cuda/cpu/' ./Config_Files/config.txt; \
    fi

# Persistent storage; declared AFTER /tldw is fully populated so the build
# content above is preserved in the volume's initial contents.
VOLUME /tldw

# Documentation only (does not publish the port): the GUI listens on 7860
EXPOSE 7860

# Listen on all interfaces so `docker run -p 7860:7860` is reachable from the host
ENV GRADIO_SERVER_NAME="0.0.0.0"

# Run the application.
# FIX: exec-form CMD performs no shell word-splitting — the original
# "-log DEBUG" arrived as a single argv token (space included) and would not
# parse as a flag + value; the flag and its value must be separate list items.
CMD ["python", "summarize.py", "-gui", "-log", "DEBUG"]