k-l-lambda committed
Commit 29975e7 · 1 Parent(s): 099ebfa
Files changed (1)
  1. config.json +1 -0
config.json CHANGED
@@ -15,6 +15,7 @@
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 77,
   "model_type": "clip_text_model",
+  "name": "text/20221023.b-sentencegen-mj",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "pad_token_id": 1,