rusen committed on
Commit c80b311 · 1 Parent(s): 3c8fdbd

Updated app.py

Files changed (1)
  1. app.py +15 -8
app.py CHANGED
@@ -1,17 +1,24 @@
 import gradio as gr
 from transformers import pipeline
 import numpy as np
+from utils import model_predict
 
 
-roberta_base_detector = pipeline("text-classification", model="Models/fine_tuned/roberta-base-openai-detector-model", tokenizer="Models/fine_tuned/roberta-base-openai-detector-tokenizer")
-chatgpt_lli_hc3_detector = pipeline("text-classification", model="Models/fine_tuned/chatgpt-detector-lli-hc3-model", tokenizer="Models/fine_tuned/chatgpt-detector-lli-hc3-tokenizer")
-chatgpt_roberta_detector = pipeline("text-classification", model="Models/fine_tuned/chatgpt-detector-roberta-model", tokenizer="Models/fine_tuned/chatgpt-detector-roberta-tokenizer")
+roberta_base_detector = AutoModelForSequenceClassification.from_pretrained("Models/fine_tuned/roberta_base_detector-model")
+roberta_base_detector_tknz = AutoTokenizer.from_pretrained("Models/fine_tuned/roberta_base_detector-tokenizer")
+
+chatgpt_lli_hc3_detector = AutoModelForSequenceClassification.from_pretrained("Models/fine_tuned/chatgpt_lli_hc3_detector-model")
+chatgpt_lli_hc3_detector_tknz = AutoTokenizer.from_pretrained("Models/fine_tuned/chatgpt_lli_hc3_detector-tokenizer")
+
+chatgpt_roberta_detector = AutoModelForSequenceClassification.from_pretrained("Models/fine_tuned/chatgpt_roberta_detector-model")
+chatgpt_roberta_detector_tknz = AutoTokenizer.from_pretrained("Models/fine_tuned/chatgpt_roberta_detector-tokenizer")
+
 
 def classify_text(text):
     # Get predictions from each model
-    roberta_base_pred = 1 if roberta_base_detector(text)[0]['label'] == "Fake" else: 0
-    chatgpt_lli_hc3_pred = chatgpt_lli_hc3_detector(text)[0]['label']
-    chatgpt_roberta_pred = chatgpt_roberta_detector(text)[0]['label']
+    roberta_base_pred = model_predict(roberta_base_detector, roberta_base_detector_tknz, text)
+    chatgpt_lli_hc3_pred = model_predict(chatgpt_lli_hc3_detector, chatgpt_lli_hc3_detector_tknz, text)
+    chatgpt_roberta_pred = model_predict(chatgpt_roberta_detector, chatgpt_roberta_detector_tknz, text)
 
     # Count the votes for AI and Human
     votes = {"AI": 0, "Human": 0}
@@ -23,9 +30,9 @@ def classify_text(text):
 
     # Determine final decision based on majority
     if votes["AI"] > votes["Human"]:
-        return chatgpt_lli_hc3_pred
+        return "AI"
     else:
-        return chatgpt_lli_hc3_pred
+        return "Human"
 
 # Create Gradio Interface
 iface = gr.Interface(
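
The rewritten app.py delegates per-model inference to a model_predict helper imported from utils, but utils.py is not part of this commit, and the new lines also call AutoModelForSequenceClassification and AutoTokenizer without showing the corresponding transformers import. A minimal sketch of what such a helper could look like, assuming each fine-tuned checkpoint is a standard two-class sequence-classification model and that the helper returns the "AI"/"Human" strings that classify_text tallies in its votes dict (the label-normalization set below is illustrative, not taken from this repository):

# Hypothetical sketch of utils.model_predict; not included in this commit.
# app.py as diffed would also need:
#   from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

def model_predict(model, tokenizer, text):
    # Tokenize with truncation so long inputs fit the model's maximum length
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
    with torch.no_grad():
        logits = model(**inputs).logits
    pred_id = logits.argmax(dim=-1).item()
    label = model.config.id2label[pred_id]
    # Map model-specific label names (e.g. "Fake", "ChatGPT", "LABEL_1") to the
    # two classes that classify_text counts; this mapping set is an assumption.
    return "AI" if label.lower() in {"ai", "fake", "chatgpt", "label_1"} else "Human"

Normalizing each detector's raw label to a single "AI"/"Human" vote before the majority check is what lets classify_text return plain "AI" or "Human", rather than one model's raw label as the old code did in both branches.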