julian-fong committed
Commit 61a2806 • 1 parent: fe5b143
Upload model

Files changed:
- README.md +42 -0
- adapter_config.json +20 -0
- head_config.json +22 -0
- pytorch_adapter.bin +3 -0
- pytorch_model_head.bin +3 -0
README.md
ADDED
@@ -0,0 +1,42 @@
---
tags:
- adapter-transformers
- roberta
datasets:
- mnli
---

# Adapter `julian-fong/roberta-base-reft-adapter` for roberta-base

An [adapter](https://adapterhub.ml) for the `roberta-base` model that was trained on the [mnli](https://huggingface.co/datasets/mnli/) dataset and includes a prediction head for classification.

This adapter was created for usage with the **[Adapters](https://github.com/Adapter-Hub/adapters)** library.

## Usage

First, install `adapters`:

```
pip install -U adapters
```

Now, the adapter can be loaded and activated like this:

```python
from adapters import AutoAdapterModel

model = AutoAdapterModel.from_pretrained("roberta-base")
adapter_name = model.load_adapter("julian-fong/roberta-base-reft-adapter", set_active=True)
```
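Continuing from the loading snippet above (reusing `model`), the adapter and its head can be exercised end to end. This is a minimal inference sketch, not part of the original card: it assumes the standard `AutoTokenizer` from `transformers`, that the flex-head model exposes `.logits` on its output, and that the head follows the usual mnli label order (0 = entailment, 1 = neutral, 2 = contradiction), since the shipped head config only stores generic `LABEL_*` names.

```python
import torch
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("roberta-base")

# Encode an MNLI-style premise/hypothesis pair.
inputs = tokenizer(
    "A soccer game with multiple males playing.",
    "Some men are playing a sport.",
    return_tensors="pt",
)

model.eval()
with torch.no_grad():
    logits = model(**inputs).logits

# Assumed mnli label order; the shipped head only stores LABEL_0..LABEL_2.
id2label = {0: "entailment", 1: "neutral", 2: "contradiction"}
print(id2label[logits.argmax(dim=-1).item()])
```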

## Architecture & Training

<!-- Add some description here -->

## Evaluation results

<!-- Add some description here -->

## Citation

<!-- Add some description here -->
adapter_config.json
ADDED
@@ -0,0 +1,20 @@
{
  "config": {
    "architecture": "reft",
    "dropout": 0.05,
    "layers": "all",
    "non_linearity": null,
    "orthogonality": true,
    "output_reft": true,
    "prefix_positions": 1,
    "r": 1,
    "suffix_positions": 0,
    "tied_weights": false
  },
  "hidden_size": 768,
  "model_class": "RobertaAdapterModel",
  "model_name": "roberta-base",
  "model_type": "roberta",
  "name": "loreft_adapter",
  "version": "adapters.1.0.0"
}
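The config above describes a ReFT (Representation Fine-Tuning) adapter: a rank-1 intervention (`"r": 1`) on the hidden state at the first token position (`"prefix_positions": 1`) of every layer, with an orthogonality constraint on the projection; together with the adapter name `loreft_adapter`, this matches the LoReFT variant. Below is a hedged sketch of how a comparable adapter could be set up from scratch, assuming Adapters v1.0's `ReftConfig` accepts these fields under the same names as the JSON:

```python
from adapters import AutoAdapterModel, ReftConfig

model = AutoAdapterModel.from_pretrained("roberta-base")

# Field names mirror the "config" block above; assumed to map 1:1 onto ReftConfig.
reft_config = ReftConfig(
    layers="all",          # intervene on every transformer layer
    prefix_positions=1,    # edit the representation at the first token position
    suffix_positions=0,
    r=1,                   # rank of the low-rank intervention
    orthogonality=True,    # orthonormal projection, as in LoReFT
    tied_weights=False,
    dropout=0.05,
)
model.add_adapter("loreft_adapter", config=reft_config)
model.add_classification_head("loreft_adapter", num_labels=3)
model.train_adapter("loreft_adapter")
```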
head_config.json
ADDED
@@ -0,0 +1,22 @@
{
  "config": {
    "activation_function": "tanh",
    "bias": true,
    "dropout_prob": null,
    "head_type": "classification",
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1,
      "LABEL_2": 2
    },
    "layers": 2,
    "num_labels": 3,
    "use_pooler": false
  },
  "hidden_size": 768,
  "model_class": "RobertaAdapterModel",
  "model_name": "roberta-base",
  "model_type": "roberta",
  "name": "loreft_adapter",
  "version": "adapters.1.0.0"
}
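This head config corresponds to a two-layer classification head over the first token's hidden state (`"use_pooler": false`): a 768-to-768 projection with a tanh activation, followed by a 768-to-3 output layer. A rough, illustrative PyTorch equivalent follows; the trained weights themselves live in `pytorch_model_head.bin` below:

```python
import torch.nn as nn

# Illustrative stand-in for the configured head: 2 layers, tanh, num_labels = 3.
# The real module (with its trained weights) is loaded from pytorch_model_head.bin.
head = nn.Sequential(
    nn.Linear(768, 768),
    nn.Tanh(),
    nn.Linear(768, 3),
)
```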
pytorch_adapter.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6879078240921a79271ad8313f89c2735fa6232272a5cf3508b6a5d3782e5277
size 56808534
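Both `.bin` files are stored as Git LFS pointers: the repository tracks only the hash and size shown above, and the actual weight blobs are downloaded on demand. `load_adapter` in the usage snippet resolves this automatically; to fetch a file by hand, here is a sketch using `huggingface_hub` (assumed available, since `adapters` pulls it in via `transformers`):

```python
from huggingface_hub import hf_hub_download

# Resolves the LFS pointer and downloads the actual ~57 MB adapter weights.
local_path = hf_hub_download(
    repo_id="julian-fong/roberta-base-reft-adapter",
    filename="pytorch_adapter.bin",
)
print(local_path)
```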
pytorch_model_head.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dd7fe0d613637fc657d95aa81c15c38b6697fa731d9cd1963a1f6305b3dc376e
size 2373736