Update entrypoint.sh
Browse files — entrypoint.sh (+9 lines, −6 lines)
entrypoint.sh
CHANGED
@@ -1,18 +1,21 @@
#!/bin/sh
# Entrypoint (previous revision): bring up Ollama, warm the model,
# then serve the FastAPI application with Uvicorn.

# Keep Ollama's state under the image's writable data directory.
OLLAMA_HOME=/ollama-data
export OLLAMA_HOME

# Launch the Ollama daemon in the background.
ollama serve &

# Give the daemon a moment to become ready before talking to it.
sleep 5

# Warm up the model in the background so the first request is faster.
ollama run hf.co/abanm/Dubs-Q8_0-GGUF &

# Serve the API in the foreground.
uvicorn app:app --host 0.0.0.0 --port 7860
#!/bin/sh
# Container entrypoint: start the Ollama server, preload a model, then run
# the FastAPI app under Uvicorn in the foreground.

# Fail fast on any command error or use of an unset variable (POSIX-safe).
set -eu

# Ensure Ollama uses our /ollama-data directory for models and state.
OLLAMA_HOME=/ollama-data
export OLLAMA_HOME

# Start the Ollama server in the background.
ollama serve &

# Poll for API readiness instead of a fixed `sleep 5` (a race: slow hosts
# may need longer, fast ones waste time). Give up after ~30s so a broken
# server surfaces as a startup failure instead of hanging silently.
tries=0
until ollama list >/dev/null 2>&1; do
  tries=$((tries + 1))
  if [ "$tries" -ge 30 ]; then
    echo "entrypoint: Ollama did not become ready in time" >&2
    exit 1
  fi
  sleep 1
done

# (Optional) Preload a Hugging Face model (example: hf.co/abanm/Dubs-Q8_0-GGUF)
# so the first user request does not pay the model-load cost.
ollama run hf.co/abanm/Dubs-Q8_0-GGUF &

# Run from the app directory; abort with a clear status if it is missing.
cd /app || exit 1

# exec so Uvicorn replaces the shell as PID 1 and receives container
# stop signals (SIGTERM) directly, enabling graceful shutdown.
exec uvicorn app:app --host 0.0.0.0 --port 7860