from transformers import AutoTokenizer, AutoModelForQuestionAnswering, pipeline
import torch


class QuestionAnswering:
    def __init__(self):
        # Use the GPU when one is available, otherwise fall back to the CPU.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.tokenizer = AutoTokenizer.from_pretrained("savasy/bert-base-turkish-squad")
        # Load the Turkish SQuAD model and move it to the selected device.
        self.model = AutoModelForQuestionAnswering.from_pretrained(
            "savasy/bert-base-turkish-squad"
        ).to(self.device)
        # The pipeline handles tokenization, inference, and answer-span decoding.
        self.pipe = pipeline(
            "question-answering",
            model=self.model,
            tokenizer=self.tokenizer,
            device=self.device,
        )

    def prediction(self, question, context):
        # Returns a dict with the answer text, its character span, and a score.
        result = self.pipe(question=question, context=context)
        return result
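
For reference, a minimal usage sketch of the class above; the Turkish question and context strings are illustrative placeholders, not examples taken from the model card:

# Minimal usage sketch; the strings below are illustrative placeholders.
qa = QuestionAnswering()
context = "Mustafa Kemal Atatürk 1881 yılında Selanik'te doğdu."
question = "Mustafa Kemal Atatürk nerede doğdu?"
result = qa.prediction(question, context)
# `result` is a dict with keys such as 'answer', 'score', 'start', and 'end'.
print(result["answer"], result["score"])

Routing inference through the question-answering pipeline keeps the class small: the pipeline takes care of splitting long contexts into overlapping windows and of picking the best answer span, so `prediction` only needs to forward the question and context.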