Update README.md
README.md (changed):

````diff
@@ -26,8 +26,8 @@ Now, the adapter can be loaded and activated like this:
 ```python
 from transformers import AutoAdapterModel
 
-model = AutoAdapterModel.from_pretrained("allenai/
-adapter_name = model.load_adapter("allenai/
+model = AutoAdapterModel.from_pretrained("allenai/specter2_base")
+adapter_name = model.load_adapter("allenai/specter2", source="hf", set_active=True)
 ```
 ## SPECTER 2.0
 
@@ -90,13 +90,13 @@ It builds on the work done in [SciRepEval: A Multi-Format Benchmark for Scientif
 from transformers import AutoTokenizer, AutoModel
 
 # load model and tokenizer
-tokenizer = AutoTokenizer.from_pretrained('allenai/
+tokenizer = AutoTokenizer.from_pretrained('allenai/specter2_base')
 
 #load base model
-model = AutoModel.from_pretrained('allenai/
+model = AutoModel.from_pretrained('allenai/specter2_base')
 
 #load the adapter(s) as per the required task, provide an identifier for the adapter in load_as argument and activate it
-model.load_adapter("allenai/
+model.load_adapter("allenai/specter2", source="hf", load_as="specter2", set_active=True)
 
 papers = [{'title': 'BERT', 'abstract': 'We introduce a new language representation model called BERT'},
           {'title': 'Attention is all you need', 'abstract': ' The dominant sequence transduction models are based on complex recurrent or convolutional neural networks'}]
````