Separate model names from display names
app.py
CHANGED
@@ -7,19 +7,26 @@ import numpy as np
 
 # Load models
 
-model_probs_path = hf_hub_download(repo_id="tbitai/
+MODELS = [
+    (BAYES := "bayes-enron1-spam", "Bayes Enron1 spam"),
+    (NN := "nn-enron1-spam", "NN Enron1 spam"),
+    (LLM := "gisty-enron1-spam", "GISTy Enron1 spam"),
+]
+
+model_probs_path = hf_hub_download(repo_id=f"tbitai/{BAYES}", filename="probs.json")
 with open(model_probs_path) as f:
     model_probs = json.load(f)
 
-nn_model_path = hf_hub_download(repo_id="tbitai/
+nn_model_path = hf_hub_download(repo_id=f"tbitai/{NN}", filename="nn-enron1-spam.keras")
 nn_model = tf.keras.models.load_model(nn_model_path)
 
-llm_model_path = hf_hub_download(repo_id="tbitai/
+llm_model_path = hf_hub_download(repo_id=f"tbitai/{LLM}", filename="gisty-enron1-spam.keras")
 llm_model = tf.keras.models.load_model(llm_model_path)
 # Sentence Transformers should be imported after Keras models, in order to prevent it from setting Keras to legacy.
 from sentence_transformers import SentenceTransformer
 st_model = SentenceTransformer("avsolatorio/GIST-large-Embedding-v0")
 
+
 # Utils for Bayes
 
 UNK = '[UNK]'
@@ -70,12 +77,6 @@ def predict_llm(text):
     embedding = st_model.encode(text)
     return float(llm_model(np.array([embedding]))[0][0].numpy())
 
-MODELS = [
-    BAYES := "Bayes Enron1 spam",
-    NN := "NN Enron1 spam",
-    LLM := "GISTy Enron1 spam",
-]
-
 def predict(model, input_txt, unbiased, intr_threshold):
     if model == BAYES:
         return predict_bayes(input_txt, unbiased=unbiased, intr_threshold=intr_threshold)