calpt committed on
Commit
94b177d
·
verified ·
1 Parent(s): 1c924dc

Add adapter bert-base-multilingual-cased_wikiann_ner_ar_pfeiffer version 1

Browse files
378b4224dce1e2fb7746784b44b5ef0f888c574d272610e048805bc37ef2321c-e9f6f226845dce46ed8b0b8a7995dc2d55ad59e0c227f53262865d5c500d50ef-extracted/adapter_config.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "config": {
3
+ "adapter_residual_before_ln": false,
4
+ "attention_type": null,
5
+ "invertible_adapter": {
6
+ "block_type": "nice",
7
+ "non_linearity": "relu",
8
+ "reduction_factor": 2
9
+ },
10
+ "leave_out": [
11
+ 11
12
+ ],
13
+ "ln_after": false,
14
+ "ln_before": false,
15
+ "mh_adapter": false,
16
+ "new_attention_norm": null,
17
+ "non_linearity": "gelu",
18
+ "original_ln_after": true,
19
+ "original_ln_before": true,
20
+ "output_adapter": true,
21
+ "reduction_factor": 16,
22
+ "residual_before_ln": true
23
+ },
24
+ "config_id": "ba54af8d6f8f70ac",
25
+ "hidden_size": 768,
26
+ "model_class": "BertForTokenClassification",
27
+ "model_name": "bert-base-multilingual-cased",
28
+ "model_type": "bert",
29
+ "name": "ner",
30
+ "type": "text_task"
31
+ }
378b4224dce1e2fb7746784b44b5ef0f888c574d272610e048805bc37ef2321c-e9f6f226845dce46ed8b0b8a7995dc2d55ad59e0c227f53262865d5c500d50ef-extracted/head_config.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "config": null,
3
+ "hidden_size": 768,
4
+ "model_class": "BertForTokenClassification",
5
+ "model_name": "bert-base-multilingual-cased",
6
+ "model_type": "bert",
7
+ "name": null
8
+ }
378b4224dce1e2fb7746784b44b5ef0f888c574d272610e048805bc37ef2321c-e9f6f226845dce46ed8b0b8a7995dc2d55ad59e0c227f53262865d5c500d50ef-extracted/pytorch_adapter.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6acf16bff656eef374eb830927d9565e80b5978c9cdbc540758afacfbe86b874
3
+ size 3288923
378b4224dce1e2fb7746784b44b5ef0f888c574d272610e048805bc37ef2321c-e9f6f226845dce46ed8b0b8a7995dc2d55ad59e0c227f53262865d5c500d50ef-extracted/pytorch_model_head.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:19a8a461b0355f720711f308fed27ed4c7ef63afda3c5ce7eb255c28c6b1ca69
3
+ size 22034
README.md ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ tags:
3
+ - bert
4
+ - adapterhub:wikiann/ar
5
+ - adapter-transformers
6
+ - token-classification
7
+ license: "apache-2.0"
8
+ ---
9
+
10
+ # Adapter `bert-base-multilingual-cased_wikiann_ner_ar_pfeiffer` for bert-base-multilingual-cased
11
+
12
+ Stacked adapter on top of Language adapter. MAD-X 2.0 style. The language adapters in the last layer (layer 11) are deleted.
13
+
14
+ **This adapter was created for usage with the [Adapters](https://github.com/Adapter-Hub/adapters) library.**
15
+
16
+ ## Usage
17
+
18
+ First, install `adapters`:
19
+
20
+ ```
21
+ pip install -U adapters
22
+ ```
23
+
24
+ Now, the adapter can be loaded and activated like this:
25
+
26
+ ```python
27
+ from adapters import AutoAdapterModel
28
+
29
+ model = AutoAdapterModel.from_pretrained("bert-base-multilingual-cased")
30
+ adapter_name = model.load_adapter("AdapterHub/bert-base-multilingual-cased_wikiann_ner_ar_pfeiffer")
31
+ model.set_active_adapters(adapter_name)
32
+ ```
33
+
34
+ ## Architecture & Training
35
+
36
+ - Adapter architecture: pfeiffer
37
+ - Prediction head: tagging
38
+ - Dataset: [Arabic](https://adapterhub.ml/explore/wikiann/ar/)
39
+
40
+ ## Author Information
41
+
42
+ - Author name(s): Jonas Pfeiffer
43
+ - Author email: [email protected]
44
+ - Author links: [Website](https://pfeiffer.ai), [GitHub](https://github.com/JoPfeiff), [Twitter](https://twitter.com/PfeiffJo)
45
+
46
+ ## Versions
47
+ - `1` **(main)**
48
+ - `2`
49
+ - `3`
50
+ - `4`
51
+ - `5`
52
+
53
+ ## Citation
54
+
55
+ ```bibtex
56
+ @article{Pfeiffer21UNKs,
57
+ author = {Jonas Pfeiffer and
58
+ Ivan Vuli\'{c} and
59
+ Iryna Gurevych and
60
+ Sebastian Ruder},
61
+ title = {{UNKs Everywhere: Adapting Multilingual Language Models to New Scripts}},
62
+ journal = {arXiv preprint},
63
+ year = {2021},
64
+ url = {https://arxiv.org/abs/2012.15562}
65
+ }
66
+
67
+ ```
68
+
69
+ *This adapter has been auto-imported from https://github.com/Adapter-Hub/Hub/blob/master/adapters/ukp/bert-base-multilingual-cased_wikiann_ner_ar_pfeiffer.yaml*.
adapter_config.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "config": {
3
+ "adapter_residual_before_ln": false,
4
+ "cross_adapter": false,
5
+ "dropout": 0.0,
6
+ "factorized_phm_W": true,
7
+ "factorized_phm_rule": false,
8
+ "hypercomplex_nonlinearity": "glorot-uniform",
9
+ "init_weights": "bert",
10
+ "inv_adapter": null,
11
+ "inv_adapter_reduction_factor": null,
12
+ "is_parallel": false,
13
+ "learn_phm": true,
14
+ "leave_out": [
15
+ 11
16
+ ],
17
+ "ln_after": false,
18
+ "ln_before": false,
19
+ "mh_adapter": false,
20
+ "non_linearity": "gelu_new",
21
+ "original_ln_after": true,
22
+ "original_ln_before": true,
23
+ "output_adapter": true,
24
+ "phm_bias": true,
25
+ "phm_c_init": "normal",
26
+ "phm_dim": 4,
27
+ "phm_init_range": 0.0001,
28
+ "phm_layer": false,
29
+ "phm_rank": 1,
30
+ "reduction_factor": 16,
31
+ "residual_before_ln": true,
32
+ "scaling": 1.0,
33
+ "shared_W_phm": false,
34
+ "shared_phm_rule": true,
35
+ "use_gating": false
36
+ },
37
+ "hidden_size": 768,
38
+ "model_class": "BertAdapterModel",
39
+ "model_name": "bert-base-multilingual-cased",
40
+ "model_type": "bert",
41
+ "name": "ner",
42
+ "version": "0.2.0"
43
+ }
head_config.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "config": {
3
+ "activation_function": null,
4
+ "dropout_prob": null,
5
+ "head_type": "tagging",
6
+ "label2id": {
7
+ "0": 0,
8
+ "B-LOC": 5,
9
+ "B-ORG": 3,
10
+ "B-PER": 1,
11
+ "I-LOC": 6,
12
+ "I-ORG": 4,
13
+ "I-PER": 2
14
+ },
15
+ "layers": 1,
16
+ "num_labels": 7
17
+ },
18
+ "hidden_size": 768,
19
+ "model_class": "BertAdapterModel",
20
+ "model_name": "bert-base-multilingual-cased",
21
+ "model_type": "bert",
22
+ "name": "ner",
23
+ "version": "0.2.0"
24
+ }
pytorch_adapter.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0e79770e4360ee3ba1aab1f42ef5dcdb7bcdc02395e02479774b280ac181619f
3
+ size 3295122
pytorch_model_head.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fe4713d6d5164ae1ccf1764c2440c3671babf7bbb0051bd37fb1b324cb8f7826
3
+ size 23066