do not merge! mistral example

#1
Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "Skywork/Skywork-Reward-Llama-3.1-8B-v0.2",
   "architectures": [
-    "LlamaForSequenceClassification"
+    "MistralForSequenceClassification"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -24,7 +24,7 @@
   },
   "max_position_embeddings": 131072,
   "mlp_bias": false,
-  "model_type": "llama",
+  "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,