FROM ollama/ollama:latest

# Install basic networking and debugging tools
RUN apt-get update && apt-get upgrade -y && \
    apt-get install -y wget git lsof net-tools curl iproute2 netcat

# (Re)install Ollama with the official install script (the base image already ships Ollama)
RUN curl -fsSL https://ollama.com/install.sh | sh

# Create a new user 'user' with UID 1000 and set the home directory
RUN useradd -m -u 1000 user

# Set the working directory to /app
WORKDIR /app

# Copy the run_ollama.sh startup script and change its ownership to 'user'
COPY --chown=user ./run_ollama.sh run_ollama.sh

# Install the required packages in non-interactive mode and without caching
# RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Install the Hugging Face Hub CLI and log in using the provided token
# RUN pip install huggingface-hub && \
#     huggingface-cli login --token $HUGGINGFACE_TOKEN

# Install the llama-cpp-python package with the 'server' extra
# RUN pip install 'llama-cpp-python[server]'

# Copy the rest of the application files and change their ownership to 'user'
COPY --chown=user . /app

# Switch to the 'user' user
USER user

# Set the HOME and PATH environment variables for the user
ENV HOME=/home/user
ENV PATH=$HOME/.local/bin:$PATH

# Listen on all interfaces and allow cross-origin requests to the API
ENV OLLAMA_HOST=0.0.0.0
ENV OLLAMA_ORIGINS="*"

WORKDIR /app

# The base image sets ENTRYPOINT ["/bin/ollama"]; clear it so the CMD below
# runs as written instead of being appended to that entrypoint.
ENTRYPOINT []
CMD ["ollama", "serve"]
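
# ----------------------------------------------------------------------------
# Usage sketch (assumptions: the image tag "ollama-space" and the published
# host port are illustrative and not taken from this file):
#
#   docker build -t ollama-space .
#   docker run -p 11434:11434 ollama-space
#
# Ollama listens on port 11434 by default; with OLLAMA_HOST=0.0.0.0 the API is
# reachable from outside the container, e.g.:
#
#   curl http://localhost:11434/api/tags
# ----------------------------------------------------------------------------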