# CUDA-enabled llama.cpp server image
FROM ghcr.io/ggerganov/llama.cpp:server-cuda
# curl is needed to download the model weights at build time
RUN apt update && apt install -y curl
# Fetch the quantized DeepSeek Coder 6.7B instruct GGUF (Q6_K) from Hugging Face
RUN mkdir /models
RUN curl -L https://huggingface.co/TheBloke/deepseek-coder-6.7B-instruct-GGUF/resolve/main/deepseek-coder-6.7b-instruct.Q6_K.gguf --output /models/deepseek-coder-6.7b-instruct.Q6_K.gguf
# Start the bundled server: offload 32 layers to the GPU, listen on 0.0.0.0:7860,
# use an 8192-token context, and apply the deepseek chat template
ENTRYPOINT [ "/server" ]
CMD [ "-ngl", "32", "--host", "0.0.0.0", "--port", "7860", "--model", "/models/deepseek-coder-6.7b-instruct.Q6_K.gguf", "-c", "8192", "--chat-template", "deepseek" ]