enricorampazzo committed
Commit 2720b8d • 1 Parent(s): 596b83f

removing unnecessary dependencies when running in hf, using conditionals to avoid loading npu libraries in hf

Dockerfile CHANGED
@@ -5,7 +5,7 @@ RUN source venv/bin/activate
 RUN ls -a
 RUN mkdir app
 COPY . app
-RUN mkdir -p app/deps
 WORKDIR app
-RUN pip install -r requirements.txt
+RUN rm deps/*.whl
+RUN pip install -r requirements-base.txt
 ENTRYPOINT streamlit run app.py
repository/repository.py CHANGED
@@ -1,7 +1,8 @@
 from pathlib import Path
+from utils.env_utils import in_hf
 
-from repository.intel_npu import IntelNpuRepository
-# from repository.intel_npu import IntelNpuRepository
+if not in_hf():
+    from repository.intel_npu import IntelNpuRepository
 from repository.ollama import OllamaRepository
 from repository.ondemand import OndemandRepository
 from repository.repository_abc import Model
@@ -9,13 +10,14 @@ from repository.testing_repo import TestingRepository
 
 
 def get_repository(implementation: str, model: Model, system_msg: str = None, log_to_file: Path = None):
-    known_implementations = ["ollama", "intel_npu"]
-    if not implementation or implementation.lower() not in ["ollama", "intel_npu", "testing", "ondemand"]:
+    known_implementations = ["ollama", "intel_npu", "testing", "ondemand"]
+    if not implementation or implementation.lower() not in known_implementations:
         raise ValueError(f"Unknown implementation {implementation}. Known implementations: {known_implementations}")
     if "ollama" == implementation:
         return OllamaRepository(model, system_msg)
-    if "intel_npu" == implementation:
-        return IntelNpuRepository(model, system_msg, log_to_file)
+    if not in_hf():
+        if "intel_npu" == implementation:
+            return IntelNpuRepository(model, system_msg, log_to_file)
     if "ondemand" == implementation:
         return OndemandRepository(model, system_msg, log_to_file)
     if "testing" == implementation:
requirements-base.txt ADDED
@@ -0,0 +1,3 @@
+transformers
+streamlit
+PyPDFForm
requirements-bin.txt DELETED
File without changes
requirements.txt → requirements-npu.txt RENAMED
@@ -1,6 +1,3 @@
-PyPDFForm
-ollama
-transformers
-streamlit
+-r requirements-base.txt
 deps/intel_npu_acceleration_library-1.3.0-cp310-cp310-win_amd64.whl ; sys_platform == "windows"
 deps/intel_npu_acceleration_library-1.3.0-cp310-cp310-linux_x86_64.whl ; sys_platform == "linux"
requirements-ollama.txt ADDED
@@ -0,0 +1,2 @@
+-r requirements-base.txt
+ollama
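
With this split, each deployment target installs only what it needs: pip install -r requirements-base.txt on Hugging Face (as the updated Dockerfile now does), pip install -r requirements-ollama.txt for a setup that talks to Ollama, and pip install -r requirements-npu.txt for the Intel NPU wheels; pip follows the -r requirements-base.txt include in each file, so the shared dependencies stay defined in one place.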
utils/env_utils.py ADDED
@@ -0,0 +1,3 @@
+import os
+def in_hf() -> bool:
+    return os.getenv("env") == "hf"
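
A quick way to exercise the new helper locally (run from the project root): nothing in this commit shows where env=hf is actually set, presumably the Space or image configuration, so the assignment below only simulates it.

import os

from utils.env_utils import in_hf

os.environ["env"] = "hf"   # simulate the Hugging Face environment
assert in_hf()

os.environ.pop("env")      # any other environment (local, NPU, etc.)
assert not in_hf()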