wynn-lee committed on
Commit
6a9e19c
1 Parent(s): f9f84be

Upload MistralForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +2 -2
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/content/drive/My Drive/huggingface_cache/mistralai/Mistral-7B-v0.1",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
@@ -37,6 +37,6 @@
37
  "tie_word_embeddings": false,
38
  "torch_dtype": "float32",
39
  "transformers_version": "4.38.2",
40
- "use_cache": true,
41
  "vocab_size": 32000
42
  }
 
1
  {
2
+ "_name_or_path": "/content/drive/MyDrive/huggingface_cache/mistralai/Mistral-7B-v0.1",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
 
37
  "tie_word_embeddings": false,
38
  "torch_dtype": "float32",
39
  "transformers_version": "4.38.2",
40
+ "use_cache": false,
41
  "vocab_size": 32000
42
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f4a76b3c444a566dba4577f7b13c162039cbc87247466186f67b2983063da9be
3
  size 4975659216
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e053dc482ae1538ca2d4626aa819be51ee5010fe326542c3004bfdb424d3cfc7
3
  size 4975659216