Update README.md
README.md CHANGED
@@ -14,8 +14,8 @@ large batch size of 0.5M tokens. A larger 762 million parameter model can also b
 
 ```python
 from transformers import AutoTokenizer,AutoModel
-model =
-tokenizer =
+model = MegatronBertModel.from_pretrained("mmukh/SOBertBase")
+tokenizer = BertTokenizerFast.from_pretrained("mmukh/SOBertBase")
 
 ```
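One note on the snippet added here: it imports `AutoTokenizer` and `AutoModel` but instantiates `MegatronBertModel` and `BertTokenizerFast`. A minimal, self-contained sketch of how the commit's code could be run end to end, assuming the `mmukh/SOBertBase` checkpoint loads as a standard MegatronBERT model (the example input and mean-pooling step are illustrative, not from the README):

```python
import torch
from transformers import MegatronBertModel, BertTokenizerFast

# Load the SOBert base checkpoint and its tokenizer, as in the updated README.
tokenizer = BertTokenizerFast.from_pretrained("mmukh/SOBertBase")
model = MegatronBertModel.from_pretrained("mmukh/SOBertBase")
model.eval()

# Encode an example Stack Overflow-style question and mean-pool the last
# hidden states into a single sentence-level embedding.
text = "How do I reverse a list in Python?"
inputs = tokenizer(text, return_tensors="pt", truncation=True)
with torch.no_grad():
    outputs = model(**inputs)
embedding = outputs.last_hidden_state.mean(dim=1)
print(embedding.shape)  # (1, hidden_size)
```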