File size: 810 Bytes
d03fa96
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21

from transformers import AutoTokenizer, AutoModelForQuestionAnswering, pipeline
import torch


class QuestionAnswering:
    """Extractive question answering for Turkish text.

    Wraps a BERT model fine-tuned on a Turkish SQuAD-style dataset behind a
    Hugging Face ``question-answering`` pipeline.
    """

    # Single source of truth for the checkpoint name (was repeated three times).
    MODEL_NAME = "savasy/bert-base-turkish-squad"

    def __init__(self):
        # Prefer the GPU when one is available.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.tokenizer = AutoTokenizer.from_pretrained(self.MODEL_NAME)

        # BUG FIX: the original compared a torch.device object to the string
        # "cuda" (`self.device == "cuda"`), which is always False, so the model
        # was never moved to the GPU. Use .to(device) for correct placement.
        self.model = AutoModelForQuestionAnswering.from_pretrained(
            self.MODEL_NAME
        ).to(self.device)

        # Pass the device to the pipeline as well so input tensors are placed
        # on the same device as the model (otherwise inference runs on CPU
        # or fails with a device mismatch).
        self.pipe = pipeline(
            "question-answering",
            model=self.model,
            tokenizer=self.tokenizer,
            device=self.device,
        )

    def prediction(self, question, context):
        """Answer *question* from *context*.

        Args:
            question: The question string.
            context: The passage to extract the answer from.

        Returns:
            The pipeline result dict (keys: ``score``, ``start``, ``end``,
            ``answer``).
        """
        return self.pipe(question=question, context=context)