proKBD committed on
Commit
961363d
·
verified ·
1 Parent(s): eebbbd4

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +7 -47
Dockerfile CHANGED
@@ -1,64 +1,24 @@
1
- # Core dependencies
2
- streamlit==1.31.1
3
- fastapi==0.109.2
4
- uvicorn==0.27.1
5
- python-multipart==0.0.6
6
- python-dotenv==1.0.1
7
- aiofiles==23.2.1
8
-
9
- # Data processing and analysis
10
- pandas==2.2.0
11
- numpy==1.26.3
12
- scikit-learn==1.4.0FROM python:3.9-slim
13
 
14
  WORKDIR /app
15
 
16
  # Install system dependencies
17
  RUN apt-get update && apt-get install -y \
18
  build-essential \
19
- libffi-dev \
20
- python3-dev \
21
  && rm -rf /var/lib/apt/lists/*
22
 
23
- # Copy only requirements first to leverage Docker cache
24
  COPY requirements.txt .
 
25
 
26
- # Install Python dependencies with optimizations
27
- RUN pip install --no-cache-dir pip -U && \
28
- pip install --no-cache-dir -r requirements.txt && \
29
- pip cache purge
30
 
31
  # Copy the rest of the application
32
  COPY . .
33
 
34
- # Create necessary directories
35
- RUN mkdir -p audio_output sentiment_history
36
-
37
- # Download NLTK data
38
- RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')"
39
-
40
- # Expose the port Streamlit will run on
41
  EXPOSE 8501
42
 
43
  # Command to run the application
44
- CMD ["streamlit", "run", "app.py", "--server.address", "0.0.0.0"]
45
- textblob==0.17.1
46
- vaderSentiment==3.3.2
47
-
48
- # Web scraping and parsing
49
- beautifulsoup4==4.12.2
50
- requests==2.31.0
51
- lxml==4.9.3
52
-
53
- # NLP and ML
54
- nltk==3.8.1
55
- transformers==4.37.2
56
- torch==2.2.0
57
- spacy==3.7.2
58
-
59
- # Visualization and audio
60
- plotly==5.18.0
61
- gTTS==2.5.0
62
-
63
- # Translation
64
- googletrans-py==4.0.0
 
1
# syntax=docker/dockerfile:1
FROM python:3.10-slim

WORKDIR /app

# Install system dependencies needed to compile Python wheels.
# --no-install-recommends keeps the layer minimal (hadolint DL3015);
# clean the apt cache in the same layer so it never lands in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
    && rm -rf /var/lib/apt/lists/*

# Copy only the requirements manifest first so the dependency layer
# stays cached until requirements.txt itself changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Download the spaCy English model at build time (requires network)
RUN python -m spacy download en_core_web_sm

# Copy the rest of the application
COPY . .

# Run as an unprivileged user: --create-home gives Streamlit a writable
# ~/.streamlit for its config; chown lets the app write under /app
# (e.g. audio_output / sentiment_history directories).
RUN useradd --system --uid 10001 --create-home appuser \
    && chown -R appuser:appuser /app
USER appuser

# Document the port Streamlit listens on (EXPOSE does not publish it)
EXPOSE 8501

# Exec-form CMD so Streamlit runs as PID 1 and receives SIGTERM on stop
CMD ["streamlit", "run", "app.py", "--server.address=0.0.0.0"]