PeteBleackley committed on
Commit
0941a89
·
1 Parent(s): 50de02e

Low level RoBERTa layers don't necessarily return what I expect them to

Browse files
Files changed (1) hide show
  1. qarac/models/QaracDecoderModel.py +1 -1
qarac/models/QaracDecoderModel.py CHANGED
@@ -82,7 +82,7 @@ class QaracDecoderHead(keras.layers.Layer):
82
  None,
83
  False,
84
  training)
85
- return self.head(self.layer_1(l0.last_hidden_state[:,1:],
86
  attention_mask,
87
  None,
88
  None,
 
82
  None,
83
  False,
84
  training)
85
+ return self.head(self.layer_1(l0[0][:,1:],
86
  attention_mask,
87
  None,
88
  None,