FROM ollama/ollama:latest

# Install Python and pip
RUN apt update && apt install -y python3 python3-pip

# Install litellm and its proxy dependencies
RUN pip install 'litellm[proxy]'

# Create a directory for Ollama data
RUN mkdir -p /.ollama && chmod -R 777 /.ollama
WORKDIR /.ollama

# Copy the entry point script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

# Set the entry point script as the default command
ENTRYPOINT ["/entrypoint.sh"]

# Expose the port that Ollama runs on
EXPOSE 7860

# Set the model name as an environment variable (this can be overridden)
ENV MODEL_NAME=your_model_name_here
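
The Dockerfile copies an entrypoint.sh that is not shown here. A minimal sketch of what such a script could look like, assuming the flow is: start the Ollama server, pull the model named by MODEL_NAME, then run the LiteLLM proxy on the exposed port 7860 in front of the local Ollama instance. The script contents, the sleep-based wait, and the litellm flags are illustrative assumptions, not the original script:

#!/bin/sh
set -e

# Start the Ollama server in the background (assumption: it serves on its
# default internal port and the proxy handles external traffic on 7860).
ollama serve &

# Crude wait for the server to come up before pulling the model.
sleep 5

# Pull the model named in the MODEL_NAME environment variable.
ollama pull "$MODEL_NAME"

# Run the LiteLLM proxy in the foreground so the container stays alive,
# exposing an OpenAI-compatible endpoint that forwards to local Ollama.
exec litellm --model "ollama/$MODEL_NAME" --host 0.0.0.0 --port 7860

With this arrangement, clients would send OpenAI-style requests to port 7860 and the proxy relays them to Ollama inside the container.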