hypencoder.6_layer / config.json
{
"architectures": [
"HypencoderDualEncoder"
],
"base_encoder_output_dim": 768,
"loss_kwargs": [
{}
],
"loss_type": [
"margin_mse"
],
"passage_encoder_kwargs": {
"model_name_or_path": "google-bert/bert-base-uncased",
"pooling_type": "cls"
},
"passage_encoder_type": "",
"query_encoder_kwargs": {
"converter_kwargs": {
"activation_type": "relu",
"vector_dimensions": [
768,
768,
768,
768,
768,
768,
768,
1
]
},
"model_name_or_path": "google-bert/bert-base-uncased"
},
"query_encoder_type": "",
"shared_encoder": true,
"torch_dtype": "float32",
"transformers_version": "4.48.2"
}
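
The converter_kwargs above appear to describe the small query-specific network ("q-net") that the Hypencoder query encoder generates: adjacent values in vector_dimensions define one linear layer each (six 768-wide hidden layers, then a single scalar relevance score), with ReLU activations as given by activation_type. The sketch below is not the authors' implementation; it only illustrates, under that assumed interpretation, what shape of network those dimensions imply when applied to 768-dim passage embeddings.

    # Minimal sketch (assumption: vector_dimensions lists the layer widths of the
    # generated q-net, with ReLU between hidden layers and a scalar output).
    import json
    import torch
    from torch import nn

    with open("config.json") as f:
        config = json.load(f)

    converter = config["query_encoder_kwargs"]["converter_kwargs"]
    dims = converter["vector_dimensions"]  # [768, 768, 768, 768, 768, 768, 768, 1]

    layers = []
    for in_dim, out_dim in zip(dims[:-1], dims[1:]):
        layers.append(nn.Linear(in_dim, out_dim))
        if out_dim != 1:
            # "activation_type": "relu" -- assumed to apply after each hidden layer
            layers.append(nn.ReLU())

    q_net = nn.Sequential(*layers)

    # Applying the q-net to a batch of passage embeddings yields one score per passage.
    passage_embeddings = torch.randn(4, config["base_encoder_output_dim"])
    scores = q_net(passage_embeddings)
    print(scores.shape)  # torch.Size([4, 1])

In the actual model the weights of this q-net are produced per query by the hypernetwork, rather than learned as a single fixed module as in this illustration.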