# syntax=docker/dockerfile:1
# Image for a FastAPI app backed by a local Ollama server.
# - Installs Python 3 and Ollama on Ubuntu 22.04
# - Pre-pulls the llama3 model at build time (NOTE: bakes the multi-GB model
#   into an image layer; consider a runtime volume if image size matters)
# - Runs `ollama serve` and uvicorn together from a single CMD

# Use Ubuntu as the base image
FROM ubuntu:22.04

# Set the working directory in the container
WORKDIR /app

# Install system dependencies and Python.
# DEBIAN_FRONTEND=noninteractive prevents interactive prompts (e.g. tzdata)
# from hanging the build; ca-certificates is required for the HTTPS fetch of
# the Ollama installer below; the apt list cleanup keeps the layer small.
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        python3 \
        python3-pip \
        curl \
        ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# Provide a `python` alias for tooling that expects it.
RUN ln -s /usr/bin/python3 /usr/bin/python

# Install Ollama. The official installer places the binary in /usr/local/bin,
# which is already on PATH.
RUN curl -fsSL https://ollama.com/install.sh | bash

# NOTE(review): /root/.ollama holds models, not executables, so this PATH
# entry is almost certainly unnecessary — kept only to preserve the runtime
# environment; confirm nothing depends on it, then remove.
ENV PATH="/root/.ollama/bin:$PATH"

# Pre-download the llama3 model so it is not fetched at container start.
# Start the server in the background, poll until its HTTP API answers
# (a fixed `sleep 5` raced the server startup on slow builders), pull the
# model, then stop the server so the layer finishes cleanly.
RUN ollama serve & \
    SERVE_PID=$!; \
    for i in $(seq 1 30); do \
        curl -fsS http://127.0.0.1:11434/ >/dev/null 2>&1 && break; \
        sleep 1; \
    done; \
    ollama pull llama3; \
    kill "$SERVE_PID"

# Copy only the requirements file first so the pip layer is cached unless
# requirements.txt itself changes.
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application files
COPY . .

# Expose the FastAPI default port (Ollama's API on 11434 stays internal).
EXPOSE 8000

# Start the Ollama server in the background, then run uvicorn in the
# foreground so the container's lifetime follows the FastAPI process.
CMD ["sh", "-c", "ollama serve & uvicorn main:app --host 0.0.0.0 --port 8000"]