Update Dockerfile
Dockerfile CHANGED (+30 -5)
@@ -18,7 +18,9 @@ RUN apt-get update && apt-get install -y \
     build-essential \
     libffi-dev \
     cmake \
-    libcurl4-openssl-dev
+    libcurl4-openssl-dev \
+    tini \
+    systemd && \
     apt-get clean

 # Upgrade pip and install dependencies
@@ -29,10 +31,6 @@ RUN pip install --no-cache-dir -r requirements.txt
 RUN curl -fsSL https://ollama.com/install.sh | sh


-RUN systemctl start ollama
-
-# Download the required model
-RUN ollama pull llama3

 # Copy the entire application
 COPY . .
@@ -40,5 +38,32 @@ COPY . .
 # Set proper permissions for the translations directory
 RUN chmod -R 777 translations

+# Create the Ollama user
+RUN useradd -r -s /bin/false -m -d /usr/share/ollama ollama
+
+# Create the service file
+RUN echo "[Unit]\n\
+Description=Ollama Service\n\
+After=network-online.target\n\n\
+[Service]\n\
+ExecStart=/usr/bin/ollama serve\n\
+User=ollama\n\
+Group=ollama\n\
+Restart=always\n\
+RestartSec=3\n\n\
+[Install]\n\
+WantedBy=default.target" > /etc/systemd/system/ollama.service
+
+# Enable the service
+RUN systemctl enable ollama
+
+# Copy the entrypoint script
+COPY entrypoint.sh /usr/local/bin/entrypoint.sh
+RUN chmod +x /usr/local/bin/entrypoint.sh
+
+# Define the entrypoint to use tini and the custom script
+ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
+# Download the required model
+RUN ollama pull llama3
 # Define the command to run the application
 CMD ["python", "./run.py"]
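
The new ENTRYPOINT points at entrypoint.sh, but the script itself is not part of this diff. A minimal sketch of what such a script could contain, assuming it starts `ollama serve` in the background, waits for the API on Ollama's default port 11434, and then execs the container's CMD (`python ./run.py`); the structure and the health-check URL are assumptions, not taken from the commit:

    #!/bin/sh
    # Hypothetical entrypoint.sh (not included in this commit).
    # Start the Ollama server in the background, wait until its API answers,
    # then hand control to the container's CMD.
    set -e

    ollama serve &

    # Poll the default Ollama port until the server responds.
    until curl -sf http://localhost:11434/ > /dev/null; do
        sleep 1
    done

    exec "$@"

Note that although the comment says the entrypoint uses tini, and tini is added to the apt-get install list, the committed ENTRYPOINT line calls the script directly rather than through tini.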
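For context, a typical build-and-run sequence for the resulting image might look like the following; the image tag and the published port are illustrative (7860 is the usual Hugging Face Spaces port), not taken from this commit:

    # Build the image from the repository root.
    docker build -t my-space .

    # Run it; the entrypoint brings up Ollama, then CMD starts run.py.
    docker run --rm -p 7860:7860 my-space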