Including cache
- Dockerfile +17 -4
- main.py +4 -1
Dockerfile
CHANGED
@@ -1,10 +1,23 @@
 FROM python:3.10.12
 
-
+# Create a non-root user
+RUN useradd -m -u 1000 user
+
+# Set up the working directory
+WORKDIR /app
 
-
+# Copy the requirements and install dependencies
+COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+# Copy the rest of the application
+COPY . .
 
-
+# Create and set permissions for the cache directory
+RUN mkdir /.cache && chown -R user:user /.cache && chmod -R 777 /.cache
 
-
+# Switch to the non-root user
+USER user
 
+# Run the application
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
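The Dockerfile changes follow the usual pattern for running an app as a non-root user: create the user, install dependencies, then pre-create /.cache with open permissions so that libraries which cache downloads under a path the user could not otherwise write to still work. The sketch below is a diagnostic aid rather than part of the commit; it assumes a standard torch installation and shows one way to check, from inside the container, where torch will place downloaded weights.

# Diagnostic sketch (not part of this commit): print where torch resolves
# its download cache for the current user.
import os
import torch

# torch.hub.get_dir() honours TORCH_HOME and otherwise falls back to
# ~/.cache/torch/hub, so it shows where pretrained weights would land.
print("TORCH_HOME:", os.environ.get("TORCH_HOME"))
print("cache dir :", torch.hub.get_dir())

Running this as the non-root user makes it easy to confirm that the resolved path is writable before any weight download happens at request time.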
main.py
CHANGED
@@ -1,3 +1,4 @@
+import os
 from fastapi import FastAPI, UploadFile
 from fastapi.responses import JSONResponse
 from fastapi.param_functions import File
@@ -6,7 +7,6 @@ from typing import List
 import io
 from facenet_pytorch import MTCNN, InceptionResnetV1
 import torch
-import io
 from PIL import Image
 
 app = FastAPI()
@@ -19,6 +19,9 @@ app.add_middleware(
     allow_headers=["*"],
 )
 
+# Set the cache directory to a writable location
+os.environ['TORCH_HOME'] = '/tmp/.cache/torch'
+
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
 mtcnn = MTCNN(keep_all=True, device=device)
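The TORCH_HOME override only helps if it is in place before any model construction triggers a weight download. A minimal sketch of that ordering is below; it assumes facenet_pytorch fetches pretrained weights through torch's download cache, and the InceptionResnetV1(pretrained='vggface2') call is an illustrative guess, not a line from this commit.

import os

# Redirect torch's cache to a writable location before any model is built,
# mirroring the change made in main.py.
os.environ['TORCH_HOME'] = '/tmp/.cache/torch'

import torch
from facenet_pytorch import MTCNN, InceptionResnetV1

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# MTCNN ships its detector weights with the package; a pretrained
# InceptionResnetV1 downloads its weights into the cache configured above.
mtcnn = MTCNN(keep_all=True, device=device)
resnet = InceptionResnetV1(pretrained='vggface2').eval().to(device)

With this ordering, the first download lands under /tmp/.cache/torch, which the non-root user created in the Dockerfile can always write to.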