FROM ubuntu:22.04

ENV DEBIAN_FRONTEND=noninteractive

# Update and install necessary dependencies
RUN apt-get update && \
    apt-get install --no-install-recommends -y \
        build-essential \
        python3 \
        python3-pip \
        wget \
        curl \
        git \
        cmake \
        zlib1g-dev \
        libblas-dev && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Download the quantized Magicoder model
RUN wget https://huggingface.co/itsdotscience/Magicoder-S-DS-6.7B-GGUF/resolve/main/Magicoder-S-DS-6.7B_q8_0.gguf

# Build llama.cpp from source
RUN git clone https://github.com/ggerganov/llama.cpp.git && \
    cd llama.cpp && \
    git submodule init && \
    git submodule update && \
    make

# Create a non-root user for security reasons
RUN useradd -m -u 1000 user && \
    mkdir -p /home/user/app && \
    cp /app/Magicoder-S-DS-6.7B_q8_0.gguf /home/user/app
RUN chown user:user /home/user/app/Magicoder-S-DS-6.7B_q8_0.gguf

USER user
ENV HOME=/home/user
WORKDIR $HOME/app

# Expose the server port
EXPOSE 8080

# Start the llama.cpp server with the model
CMD ["/app/llama.cpp/server", "--model", "Magicoder-S-DS-6.7B_q8_0.gguf", "--threads", "12", "--host", "0.0.0.0"]
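
# ---------------------------------------------------------------------------
# Example usage (a sketch; the image tag "magicoder-server" is an assumption,
# use whatever tag suits your setup):
#
#   docker build -t magicoder-server .
#   docker run --rm -p 8080:8080 magicoder-server
#
# Once the container is up, the llama.cpp HTTP server exposes a /completion
# endpoint; a quick smoke test could look like:
#
#   curl http://localhost:8080/completion \
#     -H "Content-Type: application/json" \
#     -d '{"prompt": "Write a Python function that reverses a string.", "n_predict": 128}'
#
# Note: the git clone above is not pinned to a commit. Newer llama.cpp
# revisions build the server binary as ./llama-server (via CMake) rather than
# ./server, so the CMD path may need adjusting for the revision you check out.
# ---------------------------------------------------------------------------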