vikram71198 committed on
Commit
5614eac
1 Parent(s): f25317c

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -55,7 +55,7 @@ I also trained for 3 full epochs on Colab's Tesla P100-PCIE-16GB GPU.
55
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
56
  import torch.nn as nn
57
  tokenizer = AutoTokenizer.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection")
58
- model = AutoModelForSequenceClassification.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection"
59
  #Following the same truncation & padding strategy used while training
60
  encoded_input = tokenizer("Enter any news article to be classified. Can be a list of articles too.", truncation = True, padding = "max_length", max_length = 512, return_tensors='pt')
61
  output = model(**encoded_input)["logits"]
 
55
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
56
  import torch.nn as nn
57
  tokenizer = AutoTokenizer.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection")
58
+ model = AutoModelForSequenceClassification.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection")
59
  #Following the same truncation & padding strategy used while training
60
  encoded_input = tokenizer("Enter any news article to be classified. Can be a list of articles too.", truncation = True, padding = "max_length", max_length = 512, return_tensors='pt')
61
  output = model(**encoded_input)["logits"]