poltextlab committed on
Commit
5a69359
·
verified ·
1 Parent(s): 07acd42

Update interfaces/cap.py

Browse files
Files changed (1) hide show
  1. interfaces/cap.py +7 -7
interfaces/cap.py CHANGED
@@ -95,28 +95,28 @@ def build_huggingface_path(language: str, domain: str):
95
 
96
  def predict(text, model_id, tokenizer_id):
97
  device = torch.device("cpu")
98
- gradio.Info("Loading model")
99
  model = AutoModelForSequenceClassification.from_pretrained(model_id, token=HF_TOKEN)
100
- gradio.Info("Loading tokenizer")
101
  tokenizer = AutoTokenizer.from_pretrained(tokenizer_id)
102
- gradio.Info("Mapping model to device")
103
  model.to(device)
104
 
105
- gradio.Info("Tokenizing")
106
  inputs = tokenizer(text,
107
  max_length=4,
108
  truncation=True,
109
  padding="do_not_pad",
110
  return_tensors="pt").to(device)
111
 
112
- gradio.Info("model.eval()")
113
  model.eval()
114
 
115
- gradio.Info("Prediction")
116
  with torch.no_grad():
117
  logits = model(**inputs).logits
118
 
119
- gradio.Info("Softmax")
120
  probs = torch.nn.functional.softmax(logits, dim=1).cpu().numpy().flatten()
121
  output_pred = {f"[{CAP_NUM_DICT[i]}] {CAP_LABEL_NAMES[CAP_NUM_DICT[i]]}": probs[i] for i in np.argsort(probs)[::-1]}
122
  output_info = f'<p style="text-align: center; display: block">Prediction was made using the <a href="https://huggingface.co/{model_id}">{model_id}</a> model.</p>'
 
95
 
96
  def predict(text, model_id, tokenizer_id):
97
  device = torch.device("cpu")
98
+ gr.Info("Loading model")
99
  model = AutoModelForSequenceClassification.from_pretrained(model_id, token=HF_TOKEN)
100
+ gr.Info("Loading tokenizer")
101
  tokenizer = AutoTokenizer.from_pretrained(tokenizer_id)
102
+ gr.Info("Mapping model to device")
103
  model.to(device)
104
 
105
+ gr.Info("Tokenizing")
106
  inputs = tokenizer(text,
107
  max_length=4,
108
  truncation=True,
109
  padding="do_not_pad",
110
  return_tensors="pt").to(device)
111
 
112
+ gr.Info("model.eval()")
113
  model.eval()
114
 
115
+ gr.Info("Prediction")
116
  with torch.no_grad():
117
  logits = model(**inputs).logits
118
 
119
+ gr.Info("Softmax")
120
  probs = torch.nn.functional.softmax(logits, dim=1).cpu().numpy().flatten()
121
  output_pred = {f"[{CAP_NUM_DICT[i]}] {CAP_LABEL_NAMES[CAP_NUM_DICT[i]]}": probs[i] for i in np.argsort(probs)[::-1]}
122
  output_info = f'<p style="text-align: center; display: block">Prediction was made using the <a href="https://huggingface.co/{model_id}">{model_id}</a> model.</p>'