manu committed on
Commit
4d26442
·
1 Parent(s): a6b3520

added file for question storing

Browse files
Files changed (2) hide show
  1. Questions.txt +0 -0
  2. app.py +6 -1
Questions.txt ADDED
File without changes
app.py CHANGED
@@ -9,6 +9,7 @@ checkpoint="MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli"
9
  tokenizer = AutoTokenizer.from_pretrained(checkpoint)
10
  model=AutoModelForSequenceClassification.from_pretrained(checkpoint)
11
  answers=['Yes',"Doesn't matter","No"]
 
12
 
13
  title = "The Seagull story"
14
  description = """
@@ -66,10 +67,14 @@ def generate_tone(index,question):
66
  """.replace("\n","")
67
 
68
  ]
 
69
  input = tokenizer(passages[index], question, truncation=True, return_tensors="pt")
70
  output = model(input["input_ids"].to("cpu")) # device = "cuda:0" or "cpu"
71
  prediction = torch.softmax(output["logits"][0], -1).tolist()
72
- return answers[np.argmax(prediction)]
 
 
 
73
 
74
  passages=["General","Pier","Boat","Island"]
75
 
 
9
  tokenizer = AutoTokenizer.from_pretrained(checkpoint)
10
  model=AutoModelForSequenceClassification.from_pretrained(checkpoint)
11
  answers=['Yes',"Doesn't matter","No"]
12
+ f=open("Questions.txt","a")
13
 
14
  title = "The Seagull story"
15
  description = """
 
67
  """.replace("\n","")
68
 
69
  ]
70
+
71
  input = tokenizer(passages[index], question, truncation=True, return_tensors="pt")
72
  output = model(input["input_ids"].to("cpu")) # device = "cuda:0" or "cpu"
73
  prediction = torch.softmax(output["logits"][0], -1).tolist()
74
+ response=answers[np.argmax(prediction)]
75
+
76
+ f.write(f'Passage = {index}\nQuestion: {question}\nAnswer: {response}\n\n')
77
+ return response
78
 
79
  passages=["General","Pier","Boat","Island"]
80