# syntax=docker/dockerfile:1
# Image for a FastAPI/uvicorn service that serves a local GGML model via
# llama-cpp-python (built against OpenBLAS). Listens on 8080.

# Pin the base (never `latest`); slim keeps the image small. Build tools are
# added explicitly below since slim ships without a compiler.
FROM python:3.11-slim-bookworm

# key=value form (space-separated ENV is deprecated)
ENV PYTHONUNBUFFERED=1

WORKDIR /app

# pipefail so a failing upstream command in `wget | tar` aborts the build
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# OS deps in one layer: compiler + OpenBLAS headers for llama-cpp-python,
# git/git-lfs for the model clone (git-lfs is in Debian's repos — no need for
# the packagecloud `curl | bash` installer). `git lfs install` registers the
# LFS filters so the clone below fetches real weights, not pointer files.
RUN apt-get update && apt-get install -y --no-install-recommends \
      build-essential \
      ca-certificates \
      curl \
      git \
      git-lfs \
      libopenblas-dev \
      wget \
    && rm -rf /var/lib/apt/lists/* \
    && git lfs install

# Pinned CMake 3.17.0, unpacked straight into /usr/local (needed by the
# llama-cpp-python source build).
RUN wget -qO- "https://cmake.org/files/v3.17/cmake-3.17.0-Linux-x86_64.tar.gz" \
    | tar --strip-components=1 -xz -C /usr/local

# Force a from-source build with OpenBLAS acceleration.
RUN CMAKE_ARGS="-DLLAMA_OPENBLAS=on" FORCE_CMAKE=1 \
    pip install --no-cache-dir llama-cpp-python

# Copy only the manifest first so the deps layer is cached until
# requirements.txt changes. (The original ran this before any COPY, so the
# file didn't exist and the build failed.)
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# NOTE(review): baking multi-GB model weights into the image layer is an
# anti-pattern — consider a volume/init-container instead. Kept to preserve
# the original behavior.
RUN git clone https://huggingface.co/TheBloke/wizardLM-7B-GGML

# Application source last — changes here don't invalidate the dep layers.
COPY . .

# Drop root for runtime; stable numeric UID for runAsNonRoot policies.
RUN groupadd --system --gid 10001 app \
    && useradd --system --uid 10001 --gid app --home /app app \
    && chown -R app:app /app
USER app

# Documentation only — the port still has to be published at `docker run`.
EXPOSE 8080

# Exec form: uvicorn is PID 1 and receives SIGTERM from `docker stop`.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080", "--workers", "2"]