michaelfeil committed on
Commit
35d9757
·
verified ·
1 Parent(s): 30cf7fd

Update 1_Pooling/config.json

Browse files

Context: Embedding models that use a causal attention mask have the full attention context only at the last token. Other pooling methods therefore do not make sense — last-token pooling is the correct choice!

Files changed (1) hide show
  1. 1_Pooling/config.json +2 -2
1_Pooling/config.json CHANGED
@@ -1,10 +1,10 @@
1
  {
2
  "word_embedding_dimension": 896,
3
  "pooling_mode_cls_token": false,
4
- "pooling_mode_mean_tokens": true,
5
  "pooling_mode_max_tokens": false,
6
  "pooling_mode_mean_sqrt_len_tokens": false,
7
  "pooling_mode_weightedmean_tokens": false,
8
- "pooling_mode_lasttoken": false,
9
  "include_prompt": true
10
  }
 
1
  {
2
  "word_embedding_dimension": 896,
3
  "pooling_mode_cls_token": false,
4
+ "pooling_mode_mean_tokens": false,
5
  "pooling_mode_max_tokens": false,
6
  "pooling_mode_mean_sqrt_len_tokens": false,
7
  "pooling_mode_weightedmean_tokens": false,
8
+ "pooling_mode_lasttoken": true,
9
  "include_prompt": true
10
  }