Spaces:
Running
Running
requirements.txt fix
Browse files- app.py +3 -2
- requirements.txt +1 -1
app.py
CHANGED
@@ -5,11 +5,12 @@ from summarization_with_bart import summarize_email_conditional
|
|
5 |
from llama2_response_mail_generator import generate_email_response
|
6 |
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification
|
7 |
import torch
|
8 |
-
import spacy
|
9 |
from huggingface_hub import hf_hub_download
|
10 |
from llama_cpp import Llama
|
11 |
import subprocess
|
12 |
-
|
|
|
|
|
13 |
|
14 |
"""**Original code**
|
15 |
|
|
|
5 |
from llama2_response_mail_generator import generate_email_response
|
6 |
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification
|
7 |
import torch
|
|
|
8 |
from huggingface_hub import hf_hub_download
|
9 |
from llama_cpp import Llama
|
10 |
import subprocess
|
11 |
+
subprocess.run(["pip", "install", "spacy==3.0.6", "--no-build-isolation"], check=True)
|
12 |
+
subprocess.run(["python", "-m", "spacy", "download", "en_core_web_sm"], check=True)
|
13 |
+
import spacy
|
14 |
|
15 |
"""**Original code**
|
16 |
|
requirements.txt
CHANGED
@@ -2,7 +2,7 @@ transformers
|
|
2 |
datasets
|
3 |
torch
|
4 |
gradio
|
5 |
-
spacy
|
6 |
llama-cpp-python
|
7 |
numpy
|
8 |
huggingface_hub
|
|
|
|
2 |
datasets
|
3 |
torch
|
4 |
gradio
|
|
|
5 |
llama-cpp-python
|
6 |
numpy
|
7 |
huggingface_hub
|
8 |
+
https://huggingface.co/spacy/en_core_web_sm/resolve/main/en_core_web_sm-any-py3-none-any.whl
|