userlocallm committed
Commit d9a8c3c · verified · 1 Parent(s): a639627

Upload 2 files

Files changed (2):
  1. app.py +14 -1
  2. requirements.txt +1 -1
app.py CHANGED

@@ -8,6 +8,7 @@ import requests
 import logging
 import subprocess
 from llama_cpp import Llama # Import Llama from llama_cpp
+import spacy
 
 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
@@ -20,9 +21,21 @@ def install_requirements():
     except subprocess.CalledProcessError as e:
         logging.error(f"Failed to install requirements: {e}")
 
+# Function to download the spaCy model
+def download_spacy_model(model_name):
+    try:
+        subprocess.check_call([os.sys.executable, '-m', 'spacy', 'download', model_name])
+        logging.info(f"SpaCy model {model_name} downloaded successfully.")
+    except subprocess.CalledProcessError as e:
+        logging.error(f"Failed to download SpaCy model {model_name}: {e}")
+
 # Install requirements
 install_requirements()
 
+# Download the spaCy model if it doesn't exist
+if not spacy.util.is_package('en_core_web_lg'):
+    download_spacy_model('en_core_web_lg')
+
 # Create the directory if it doesn't exist
 local_dir = "models"
 os.makedirs(local_dir, exist_ok=True)
@@ -88,4 +101,4 @@ demo = gr.ChatInterface(
 )
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch(server_name="0.0.0.0", server_port=7860)
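The main behavioral change in app.py is the guard that downloads the en_core_web_lg model only when it is not already installed, before anything calls spacy.load. Below is a minimal standalone sketch of that check-then-download pattern; ensure_spacy_model is a hypothetical helper name used for illustration only, not a function from this commit.

import logging
import subprocess
import sys

import spacy

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Hypothetical helper illustrating the commit's check-then-download pattern.
def ensure_spacy_model(model_name):
    """Download a spaCy model via the spaCy CLI if it is not already installed."""
    if spacy.util.is_package(model_name):
        logging.info(f"spaCy model {model_name} is already installed.")
        return
    try:
        # Same mechanism as the commit: shell out to `python -m spacy download`.
        subprocess.check_call([sys.executable, "-m", "spacy", "download", model_name])
        logging.info(f"spaCy model {model_name} downloaded successfully.")
    except subprocess.CalledProcessError as e:
        logging.error(f"Failed to download spaCy model {model_name}: {e}")

if __name__ == "__main__":
    ensure_spacy_model("en_core_web_lg")
    nlp = spacy.load("en_core_web_lg")  # safe to load once the guard has run

The sketch reaches the current interpreter through sys.executable; the commit uses os.sys.executable, which resolves to the same module but is the less conventional spelling.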
requirements.txt CHANGED

@@ -1,5 +1,5 @@
 llama-cpp-python
-requests==2.28.1
+requests
 spacy
 scikit-learn
 numpy