nferroukhi committed
Commit 16b4251
Parent: 78dd0f0

Update config.json

Files changed (1): config.json +3 -3
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "ehartford/WizardLM-Uncensored-Falcon-7b",
+  "_name_or_path": "nferroukhi/WizardLM-Uncensored-Falcon-7b-sharded-bf16",
   "alibi": false,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
@@ -7,8 +7,8 @@
   ],
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "ehartford/WizardLM-Uncensored-Falcon-7b--configuration_RW.RWConfig",
-    "AutoModelForCausalLM": "ehartford/WizardLM-Uncensored-Falcon-7b--modelling_RW.RWForCausalLM"
+    "AutoConfig": "nferroukhi/WizardLM-Uncensored-Falcon-7b-sharded-bf16--configuration_RW.RWConfig",
+    "AutoModelForCausalLM": "nferroukhi/WizardLM-Uncensored-Falcon-7b-sharded-bf16--modelling_RW.RWForCausalLM"
   },
   "bias": false,
   "bos_token_id": 1,