Update app.py
app.py
CHANGED
@@ -34,20 +34,22 @@ tokenizer = AutoTokenizer.from_pretrained("medalpaca/medalpaca-7b")
 
 #load the first interface
 
-loaded_rf = joblib.load("model_joblib")
-
 def fn(*args):
-
-
-
-
-
-
-
+    global symptoms
+    all_symptoms = [symptom for symptom_list in args for symptom in symptom_list]
+
+    if len(all_symptoms) > 17:
+        raise gr.Error("Please select a maximum of 17 symptoms.")
+    elif len(all_symptoms) < 3:
+        raise gr.Error("Please select at least 3 symptoms.")
 
+    symptoms = all_symptoms  # Update global symptoms list
+    return predd(loaded_rf,symptoms)
 
 
 
+symptoms = []
+
 
 
 demo = gr.Interface(
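The hunk above only shows the opening line of demo = gr.Interface(; the components wired to fn are outside the change. For context, a minimal sketch of how such an interface could look, assuming the symptom groups are gr.CheckboxGroup components and that predd is the app's existing helper that runs the loaded random-forest model. The symptom lists and labels below are hypothetical, not the app's real ones:

import gradio as gr

def fn(*args):
    # Stand-in for the fn added in the diff above: flatten the per-group
    # CheckboxGroup selections before handing them to the model helper.
    all_symptoms = [s for group in args for s in group]
    return f"selected: {all_symptoms}"

# Hypothetical symptom lists; the real app defines its own groups.
general_symptoms = ["fever", "fatigue", "chills"]
skin_symptoms = ["rash", "itching", "yellowish skin"]

demo = gr.Interface(
    fn=fn,
    inputs=[
        gr.CheckboxGroup(choices=general_symptoms, label="General symptoms"),
        gr.CheckboxGroup(choices=skin_symptoms, label="Skin symptoms"),
    ],
    outputs=gr.Textbox(label="Predicted disease"),
)

Each CheckboxGroup delivers its selections as a list, which is why fn(*args) flattens the per-group lists before enforcing the 3-to-17 symptom limit.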
@@ -88,7 +90,8 @@ def predict(message, history):
     return tokenizer.decode(output_tokens[0], skip_special_tokens=True).replace(prompt,"")
 
 
-
+loaded_rf = joblib.load("model_joblib")
+Fmessage="hello im here to help you!"
 
 
 
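Loading the random forest now happens at module level right after predict, next to the chatbot greeting Fmessage, instead of at the top of the interface section. joblib.load simply restores whatever estimator was saved with joblib.dump; a minimal round-trip sketch, assuming the persisted model is a scikit-learn random forest (the training code is not part of this diff, and the data here is placeholder):

import joblib
from sklearn.ensemble import RandomForestClassifier

# Assumed training side: fit on placeholder data and persist the model.
rf = RandomForestClassifier(n_estimators=10)
rf.fit([[0, 1], [1, 0]], [0, 1])
joblib.dump(rf, "model_joblib")

# App side, as in app.py: restore the model before the Gradio interfaces are built.
loaded_rf = joblib.load("model_joblib")
print(loaded_rf.predict([[0, 1]]))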
@@ -97,4 +100,4 @@ chatbot=gr.ChatInterface(predict, chatbot=gr.Chatbot(value=[(None, Fmessage)],),
 
 gr.TabbedInterface(
     [demo, chatbot], ["symptoms checker", "chatbot"]
-).launch()
+).queue().launch()
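The only functional change in the last hunk is ).launch() becoming ).queue().launch(). queue() is available on TabbedInterface because it is a Blocks subclass, and enabling the queue is the usual way to keep slow predictions, like the medalpaca generation in predict, from hitting request timeouts. A minimal sketch of the resulting two-tab layout, with stand-in functions in place of the real model calls:

import gradio as gr

def check_symptoms(symptoms):
    # Stand-in for the random-forest prediction.
    return f"you selected {len(symptoms)} symptoms"

def predict(message, history):
    # Stand-in for the medalpaca generation in app.py.
    return f"echo: {message}"

demo = gr.Interface(
    fn=check_symptoms,
    inputs=gr.CheckboxGroup(choices=["fever", "cough", "rash"], label="symptoms"),
    outputs="text",
)
chatbot = gr.ChatInterface(predict)

gr.TabbedInterface(
    [demo, chatbot], ["symptoms checker", "chatbot"]
).queue().launch()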