ARG UBUNTU_VERSION=22.04
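# Build stage: clone llama.cpp and compile its server binary with OpenBLAS support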
FROM ubuntu:$UBUNTU_VERSION AS build
RUN apt-get update && \
    apt-get install -y pkg-config build-essential git libcurl4-openssl-dev libopenblas-dev
RUN git clone https://github.com/ggerganov/llama.cpp.git
WORKDIR /llama.cpp
# ENV LLAMA_CURL=1
RUN make LLAMA_OPENBLAS=1
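# Runtime stage: a slimmer image carrying only the libraries the server needs at run time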
FROM ubuntu:$UBUNTU_VERSION AS runtime
RUN apt-get update && \
    apt-get install -y libcurl4-openssl-dev curl libopenblas-base
COPY --from=build /llama.cpp/server /server
ENV LC_ALL=C.utf8
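# Download the quantized GGUF model at image build time so the container starts without a download step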
RUN mkdir /models
RUN curl -L https://huggingface.co/TheBloke/deepseek-coder-1.3b-instruct-GGUF/resolve/main/deepseek-coder-1.3b-instruct.Q4_K_M.gguf --output /models/deepseek-coder-1.3b-instruct.Q4_K_M.gguf
COPY ./public /webui
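# Launch the server on port 7860 with 2 threads, a 512-token context, memory locking (--mlock),
# the deepseek chat template, and the copied web UI served from /webui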
ENTRYPOINT [ "/server" ]
CMD [ "--host", "0.0.0.0", "--port", "7860", "--model", "/models/deepseek-coder-1.3b-instruct.Q4_K_M.gguf", "-t", "2", "--mlock", "-c", "512", "--chat-template", "deepseek", "--path", "/webui" ]
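# To try the image locally (a sketch; the tag "llama-server-demo" is an arbitrary example name,
# and 7860 is the port the CMD above binds):
#   docker build -t llama-server-demo .
#   docker run --rm -p 7860:7860 llama-server-demo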